add argument parsing plus refactoring
parent ef2ca3d70d
commit 300b1c9ef0
import_media.py | 116
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 
 '''
 Import photos from SD card into folder with todays date + nickname
@@ -16,7 +16,6 @@ TODO:
 '''
 
 import os
-import sys
 from pprint import pprint
 import argparse
 import shutil
@@ -32,53 +31,87 @@ files = {}
 
 # Read configuration from file
 try:
-    with open(CONFIG_FILE, 'r') as f:
-        config = yaml.load(f, Loader=yaml.FullLoader)
+    with open(CONFIG_FILE, 'r') as cf:
+        config = yaml.load(cf, Loader=yaml.FullLoader)
 except FileNotFoundError:
     print("Configuration file not found: ", CONFIG_FILE)
     print("Copy config.yaml.EXAMPLE to ", CONFIG_FILE, " and update accordingly.")
 
 parser = argparse.ArgumentParser()
 parser.add_argument("-e", "--event", help = "Event Name")
+parser.add_argument("-s", "--source", help = "Source Directory to search for files")
+parser.add_argument("-d", "--destination", help = "Destination Directory to put files")
+parser.add_argument("-o", "--create-originals", help = "For images only, create an originals \
+                    folder for safe keeping")
+parser.add_argument("-b", "--backup-destination", help = "Create a backup of everything at the \
+                    specified location")
+parser.add_argument("-D", "--delete-source-files", help = "Delete files from SD after validating \
+                    checksum of copied files")
+parser.add_argument("-c", "--config", help = "Load the specified config file instead \
+                    of the default " + CONFIG_FILE)
+parser.add_argument("-g", "--generate-config", help = "Generate config file based on options \
+                    passed from command arguments")
+
 args = parser.parse_args()
 
 if args.event:
     event = args.event
+if args.source:
+    config['folders']['source']['base'] = args.source
+if args.destination:
+    config['folders']['destination']['base'] = args.destination
+#if args.create_originals:
+#    pass
+#if args.backup_destination:
+#    pass
+#if args.delete_source_files:
+#    pass
+#if args.config:
+#    pass
+#if args.generate_config:
+#    pass
 
 def dump_yaml(dictionary, file):
     """ dump a dictionary to a yaml file """
     with open(file, 'w') as f:
         yaml.dump(dictionary, f)
 
-def md5_hash(f):
+def md5_hash(file):
     """ calculates and returns md5 hash """
     #print("calculating md5 for ", f)
-    md5 = hashlib.md5(open(f, 'rb').read()).hexdigest()
+    md5 = hashlib.md5(open(file, 'rb').read()).hexdigest()
+    #with open(file, 'r') as f:
+    #    md5 = hashlib.md5(f).hexdigest()
     return md5
 
-def cmp_files(f1,f2):
+def cmp_files(file_1,file_2):
     """ Use file hashes to compare files """
-    return md5_hash(f1) == md5_hash(f2)
+    return md5_hash(file_1) == md5_hash(file_2)
 
-def get_capture_date(p, t):
+def get_capture_date(path, f_type):
     """ get capture date from meta """
-    if t == 'image':
-        with open(p, 'rb') as f:
-            tags = exifread.process_file(f)
+    if f_type == 'image':
+        with open(path, 'rb') as file:
+            tags = exifread.process_file(file)
+        if 'EXIF DateTimeOriginal' in tags:
             stamp = datetime.strptime(str(tags['EXIF DateTimeOriginal']), '%Y:%m:%d %H:%M:%S')
-    elif t == 'video':
-        stamp = datetime.strptime(ffmpeg.probe(p)['format']['tags']['creation_time'],
+        elif 'Image DateTime' in tags:
+            stamp = datetime.strptime(str(tags['Image DateTime']), '%Y:%m:%d %H:%M:%S')
+        else:
+            stamp = datetime.strptime(str('1900:01:01 00:00:00'), '%Y:%m:%d %H:%M:%S')
+    elif f_type == 'video':
+        stamp = datetime.strptime(ffmpeg.probe(path)['format']['tags']['creation_time'],
                                   '%Y-%m-%dT%H:%M:%S.%f%z')
-    elif t == 'audio':
-        stamp = datetime.strptime(ffmpeg.probe(p)['format']['tags']['date'], '%Y-%m-%d')
+    elif f_type == 'audio':
+        stamp = datetime.strptime(ffmpeg.probe(path)['format']['tags']['date'], '%Y-%m-%d')
     else:
-        stamp = datetime.fromtimestamp(os.path.getctime(p))
+        stamp = datetime.fromtimestamp(os.path.getctime(path))
 
     year = stamp.strftime("%Y")
     month = stamp.strftime("%m")
     day = stamp.strftime("%d")
     return year, month, day
 
 def path_exists(path):
     """ Does the path exist """
     return os.path.exists(path)
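
Note: argparse stores hyphenated long options under underscore names, which is why the commented-out checks above are spelled args.create_originals rather than args.create-originals (Python would read the latter as a subtraction). As declared, options such as --delete-source-files also expect a value; if they are meant as on/off switches, a minimal sketch of the flag form (an alternative, not what this commit does) would be:

    import argparse

    parser = argparse.ArgumentParser()
    # action='store_true' makes -D a boolean flag that takes no value
    parser.add_argument("-D", "--delete-source-files", action="store_true",
                        help="Delete files from SD after validating checksum of copied files")
    args = parser.parse_args(["-D"])   # example argv
    print(args.delete_source_files)    # True
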
@@ -88,9 +121,10 @@ def is_dir(path):
     p_exists = path_exists(path)
 
     if p_exists is True:
-        return os.path.isdir(path)
+        it_is_dir = os.path.isdir(path)
     else:
-        return p_exists
+        it_is_dir = p_exists
+    return it_is_dir
 
 def path_access_read(path):
     """ make sure we can read from the path """
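
Note: the single-exit rewrite of is_dir() behaves the same as the old pair of returns. Since os.path.isdir() already returns False for a path that does not exist, the whole helper could collapse to one call (a sketch, not part of this commit):

    import os

    def is_dir(path):
        """ os.path.isdir is already False for missing paths, so no existence pre-check is needed """
        return os.path.isdir(path)
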
@@ -110,11 +144,11 @@ def path_access_write(path):
 
     return val
 
-def create_folder(f):
+def create_folder(file):
     """ Function to create folder """
-    if path_exists(f) is False:
-        os.makedirs(f)
-    elif is_dir(f) is False:
+    if path_exists(file) is False:
+        os.makedirs(file)
+    elif is_dir(file) is False:
         pass # this needs to turn into bailing out as there is a collision.
 
 def copy_from_source(source_path,dest_path,file_name):
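
Note: create_folder() still only passes on a collision (the path exists but is not a directory). A sketch of the bail-out the inline comment asks for, using os.makedirs(exist_ok=True) to fold in the existence check (assumes aborting the run is the desired behaviour):

    import os
    import sys

    def create_folder(path):
        """ create the folder; bail out on a name collision """
        if os.path.exists(path) and not os.path.isdir(path):
            sys.exit(f'Collision: {path} exists and is not a directory')
        os.makedirs(path, exist_ok=True)  # no error if the directory already exists
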
@@ -126,7 +160,7 @@ def copy_from_source(source_path,dest_path,file_name):
     check_match = cmp_files(os.path.join(source_path,file_name),
                             os.path.join(dest_path, file_name))
     if check_match is False:
-        print(f'Found duplicate for {p}, renaming destination with md5 appended.')
+        print(f'Found duplicate for {source_path}, renaming destination with md5 appended.')
         base, extension = os.path.splitext(file_name)
         md5 = md5_hash(os.path.join(dest_path, file_name))
         file_name_hash = base + '_' + md5 + extension
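
Note: md5_hash(), which this duplicate check calls twice per file, reads each file into memory in one go and never closes the handle it opens, which is costly for large video files. A chunked variant with the same result (a sketch, not part of this commit):

    import hashlib

    def md5_hash(file):
        """ calculates and returns md5 hash, reading 1 MiB at a time """
        md5 = hashlib.md5()
        with open(file, 'rb') as f:
            for chunk in iter(lambda: f.read(1024 * 1024), b''):
                md5.update(chunk)
        return md5.hexdigest()
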
@@ -157,6 +191,7 @@ def process_file(path, f_type, f_name, ext):
     files[i]['date']['m'] = files[i]['date']['capture_date'][1]
     files[i]['date']['d'] = files[i]['date']['capture_date'][2]
 
+
     if event:
         files[i]['folders']['destination'] = config['folders']['destination']['base'] + \
                                              '/' + files[i]['date']['y'] + '/' + \
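
Note: the destination paths in process_file() are assembled by concatenating '/' by hand. os.path.join builds the same path while handling separator edge cases (hypothetical values stand in for the config and date fields):

    import os

    base = '/media/photos'          # hypothetical destination base
    y, m, d = '2021', '06', '15'    # hypothetical capture date fields
    print(os.path.join(base, y, m, d))  # /media/photos/2021/06/15
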
@@ -179,15 +214,16 @@ def process_file(path, f_type, f_name, ext):
         files[i]['folders']['destination'] = files[i]['folders']['destination'] + '/PHOTO'
 
         if files[i]['extension'] in ('jpg', 'jpeg'):
+            if config['store_originals'] is True:
+                files[i]['folders']['destination_original'] = files[i]['folders']['destination'] + \
+                                                              '/ORIGINALS/JPG'
             files[i]['folders']['destination'] = files[i]['folders']['destination'] + \
                                                  '/JPG'
-            if config['store_originals'] is True:
-                files[i]['folders']['destination_original'] = files[i]['folders']['destination'] + '/ORIGINALS/JPG'
         else:
-            files[i]['folders']['destination'] = files[i]['folders']['destination'] + '/RAW'
 
             if config['store_originals'] is True:
-                files[i]['folders']['destination_original'] = files[i]['folders']['destination'] + '/ORIGINALS/RAW'
+                files[i]['folders']['destination_original'] = files[i]['folders']['destination'] + \
+                                                              '/ORIGINALS/RAW'
+            files[i]['folders']['destination'] = files[i]['folders']['destination'] + '/RAW'
 
     elif files[i]['type'] == 'video':
         files[i]['folders']['destination'] = files[i]['folders']['destination'] + '/VIDEO'
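
Note: the reordering in this hunk appears to be the substantive change: destination_original is now derived from the base destination before '/JPG' (or '/RAW') is appended, so the originals folder no longer nests under the per-format folder. Schematically:

    dest = '/photos/2021/06/15/PHOTO'
    originals = dest + '/ORIGINALS/JPG'  # derive from the base first
    dest = dest + '/JPG'                 # then specialise the main destination
    print(originals)  # /photos/2021/06/15/PHOTO/ORIGINALS/JPG
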
@@ -196,7 +232,8 @@ def process_file(path, f_type, f_name, ext):
         files[i]['folders']['destination'] = files[i]['folders']['destination'] + '/AUDIO'
 
     else:
-        print('WARN: ', files[i]['type'], ' is not a known type and you never should have landed here.')
+        print('WARN: ', files[i]['type'], \
+              ' is not a known type and you never should have landed here.')
 
 def find_files(directory):
     """ find files to build a dictionary out of """
@@ -205,26 +242,28 @@ def find_files(directory):
         for ext in config['file_types'][f_type]:
             for file in tqdm(filename, desc = 'Finding ' + ext + ' Files', ncols = 100):
                 if file.lower().endswith(ext):
+                    print(file)
                     process_file(folder, f_type, file, ext)
 
 def validate_config_dir_access():
     """ Validate we can op in the defined directories """
     check = path_access_write(config['folders']['destination']['base'])
     if check is False:
-        return False
+        writable = False
     else:
         check = path_access_read(config['folders']['source']['base'])
         if check is False:
-            return False
+            writable = False
        else:
             if config['store_backup'] is True:
                 check = path_access_write(config['folders']['backup'])
                 if check is False:
-                    return False
+                    writable = False
                 else:
-                    return True
+                    writable = True
             else:
-                return True
+                writable = True
+    return writable
 
 def copy_files():
     """ Copy Files. """
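
Note: the writable-flag rewrite of validate_config_dir_access() keeps the old nesting. The same logic reads flatter as guard clauses (a sketch reusing this file's helpers and config, not part of this commit):

    def validate_config_dir_access():
        """ Validate we can op in the defined directories """
        if path_access_write(config['folders']['destination']['base']) is False:
            return False
        if path_access_read(config['folders']['source']['base']) is False:
            return False
        if config['store_backup'] is True:
            return path_access_write(config['folders']['backup'])
        return True
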
@@ -278,8 +317,8 @@ def cleanup_sd():
         if files[file]['source_cleanable'] is True:
             os.remove(os.path.join(files[file]['folders']['source_path'],files[file]['name']))
 
-go = validate_config_dir_access()
-if go is True:
+GO = validate_config_dir_access()
+if GO is True:
     find_files(config['folders']['source']['base'])
     copy_files()
     gen_hashes()
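
Note: GO (upper-cased here, presumably to satisfy the module-level constant naming convention) and the calls below still run at import time. A common alternative (a suggestion, not part of this commit) is a guarded main():

    def main():
        """ drive the import pipeline using this file's helpers """
        if validate_config_dir_access() is True:
            find_files(config['folders']['source']['base'])
            copy_files()
            gen_hashes()
        else:
            print("There was a problem accessing one or more directories "
                  "defined in the configuration.")

    if __name__ == '__main__':
        main()
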
@@ -288,5 +327,6 @@ if go is True:
 else:
     print("There was a problem accessing one or more directories defined in the configuration.")
 
+
 dump_yaml(files, 'files_dict.yaml')
 print('done.')