/appsavocabulary/management/commands/vocabulary_backup.py
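"""
vocabulary_backup - Django management command that backs up Annot controlled
vocabulary apps as fixture files.

For every selected appon* vocabulary app the command refreshes the vocabulary
through the vocabulary_loadupdate command, compares the current database
content with the latest fixture backup, and, if they differ, writes a new
dated fixture via dumpdata to
<appon>/fixtures/YYYYMMDD_<vocabulary>_<version_latest>_backup.json.
Apps listed in appsavocabulary/filter_vocabulary.txt are skipped.
"""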
# import from django
from django.core import management
from django.core.management.base import BaseCommand, CommandError
# import python standard library
import datetime
import glob
import importlib
import inspect
import json
# import annot
from appsavocabulary.models import SysAdminVocabulary

class Command( BaseCommand ):
    args = "<apponmmm_source apponmmm_source ...>"
    help = "Backup controlled vocabulary."
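
    # Example invocations (illustrative, inferred from the args and help strings above):
    #   python manage.py vocabulary_backup                    # back up all appon* vocabulary apps
    #   python manage.py vocabulary_backup apponmmm_source    # back up only the listed vocabulary apps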

    def handle(self, *args, **options):
        # initiate
        b_flag = False
        queryset = SysAdminVocabulary.objects.all()  # get queryset
        ls_queryset = []
        ls_apponfilter = []

        # load appsavocabulary/filter_vocabulary.txt
        self.stdout.write( "Load vocabulary filter" )
        with open( "appsavocabulary/filter_vocabulary.txt", 'r' ) as f:
            for s_line in f:
                s_line = s_line.strip()
                if ( s_line != "" ):
                    ls_apponfilter.append( s_line )
        self.stdout.write( "Filter: " + str(ls_apponfilter) )

        # treat argument specified vocabularies
        if ( len( args ) > 0 ):
            # get the vocabulary apps from the command line arguments
            ls_appon = list( args )
            for obj_n in queryset:
                # check if the vocabulary app exists in the vocabulary table
                if ( str( obj_n.app ) in ls_appon ):
                    ls_queryset.append( str( obj_n.app ) )  # populate queryset list
        # treat all vocabularies
        else:
            # get the vocabulary apps from the file system
            ls_appon = glob.glob( "appon*" )
            ls_appon.sort()
            # reset queryset
            self.stdout.write( "Reset vocabulary table" )
            for obj_n in queryset:
                # check if the vocabulary app exists in the vocabulary table
                if ( str( obj_n.app ) in ls_appon ):
                    ls_queryset.append( str( obj_n.app ) )  # populate queryset list
                else:
                    obj_n.delete()  # delete vocabulary from the table
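
        # For each selected vocabulary app: refresh it through the vocabulary_loadupdate
        # command, compare the database content with the latest fixture backup, and
        # write a new dated fixture if they differ.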
        # process each vocabulary app
        for s_appon in ls_appon:
            # python manage.py vocabulary_loadupdate s_appon
            management.call_command( "vocabulary_loadupdate", s_appon )
            # begin backup handling
            self.stdout.write( "\nProcessing vocabulary backup: " + s_appon )

            # check the process filter
            if ( s_appon in ls_apponfilter ):
                # don't process this vocabulary
                self.stdout.write( "WARNING: vocabulary will not be backed up because it is listed in appsavocabulary/filter_vocabulary.txt: " + s_appon )

            else:
                # load the latest database version
                try:
                    m_appon = importlib.import_module( s_appon + ".models" )
                    for s_name, obj in inspect.getmembers( m_appon ):
                        if inspect.isclass( obj ):
                            s_apponmodels = s_name
                            c_apponmodels = obj  # model class, equivalent to the json fixture model
                            lo_record = c_apponmodels.objects.all()
                            self.stdout.write( "Load database version: " + s_apponmodels )
                except ImportError:
                    raise CommandError( "Annot vocabulary app %s does not exist." % s_appon )
                # find the latest json backup file version
                self.stdout.write( "Search latest json file version..." )
                obj_n = SysAdminVocabulary.objects.get( app=s_appon )
                s_vocabulary = obj_n.vocabulary
                s_version_latest = obj_n.version_latest
                s_regexfixture = s_appon + "/fixtures/*_" + s_vocabulary + "_" + s_version_latest + "_backup.json"
                ls_backuppathfile = glob.glob( s_regexfixture )
                i_latest = 0
                s_backuplatest = None
                for s_backuppathfile in ls_backuppathfile:
                    s_backupfile = s_backuppathfile.split('/')[-1]
                    s_date = s_backupfile.split('_')[0]
                    i_date = int( s_date )
                    if ( i_latest < i_date ):
                        s_backuplatest = s_backuppathfile
                        i_latest = i_date
                # generate the backup path file name for today
                s_today = str( datetime.date.today() )
                s_today = s_today.replace( '-', '' )
                s_backuptoday = s_appon + "/fixtures/" + s_today + "_" + s_vocabulary + "_" + s_version_latest + "_backup.json"
                # if a backup was found
                if ( s_backuplatest is not None ):
                    # load the latest json file backup version
                    self.stdout.write( "Load latest json file version: " + s_backuplatest )
                    with open( s_backuplatest, 'r' ) as f_backuplatest:  # open file handle
                        ld_json = json.load( f_backuplatest )
                    # compare the database content to the latest backup content
                    self.stdout.write( "Compare database content to latest backup content." )
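                    # A record counts as changed when any tracked field differs from its
                    # fixture counterpart (matched by pk / annot_id) or when it has no
                    # counterpart at all; the second loop below flags fixture records that
                    # no longer exist in the database.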
                    # check for every database record whether it is a member of the latest json file version
                    for o_record in lo_record:
                        b_record = False
                        s_record_term_name = o_record.term_name
                        s_record_term_id = o_record.term_id
                        s_record_annot_id = o_record.annot_id
                        s_record_term_source_version_responsible = o_record.term_source_version_responsible
                        s_record_term_source_version_update = str( o_record.term_source_version_update )
                        s_record_term_source_version = o_record.term_source_version
                        s_record_term_ok = o_record.term_ok
                        for d_json in ld_json:
                            # annot_id
                            s_file_annot_id = d_json["pk"]
                            if ( s_file_annot_id == s_record_annot_id ):
                                # term_name
                                if not ( s_record_term_name == d_json["fields"]["term_name"] ):
                                    b_flag = True
                                # term_id
                                if not ( s_record_term_id == d_json["fields"]["term_id"] ):
                                    b_flag = True
                                # term_source_version_responsible
                                if not ( s_record_term_source_version_responsible == d_json["fields"]["term_source_version_responsible"] ):
                                    b_flag = True
                                # term_source_version_update
                                if not ( s_record_term_source_version_update == d_json["fields"]["term_source_version_update"] ):
                                    b_flag = True
                                # term_source_version
                                if not ( s_record_term_source_version == d_json["fields"]["term_source_version"] ):
                                    b_flag = True
                                # term_ok
                                if not ( s_record_term_ok == d_json["fields"]["term_ok"] ):
                                    b_flag = True
                                # record found in the backup
                                b_record = True
                                break
                        if not ( b_record ):
                            b_flag = True
                    # check for every record in the latest json file version whether it is a member of the database version
                    for d_json in ld_json:
                        s_file_annot_id = d_json["pk"]
                        try:
                            o_record = c_apponmodels.objects.get( annot_id=s_file_annot_id )
                        except c_apponmodels.DoesNotExist:
                            b_flag = True
                # if no backup file was found
                else:
                    self.stdout.write( "No earlier backup." )
                    b_flag = True
                # generate the backup file
                self.stdout.write( "Backup needed? " + str(b_flag) )
                if ( b_flag ):
                    # python manage.py dumpdata s_appon > s_backuptoday
                    self.stdout.write( "Generate backup file: " + s_backuptoday )
                    with open( s_backuptoday, 'w' ) as f_stream:  # encoding="utf-8"
                        management.call_command( "dumpdata", s_appon, indent=4, format='json', stdout=f_stream )
                else:
                    self.stdout.write( "Latest backup version is equivalent to the database version. Backup skipped." )