In a folder, I have 100 files with the extensions '.txt', '.doc', and '.pdf'. I need to rename the files as follows:
If the filename ends with '.txt' -> rename it to end with '.jpg'
If the filename ends with '.doc' -> rename it to end with '.mp3'
If the filename ends with '.pdf' -> rename it to end with '.mp4'
This is what I have tried so far:
import os, sys

folder = "C:/Users/TestFolder"
for filename in os.listdir(folder):
    base_file, ext = os.path.splitext(filename)
    print(ext)
    if ext == '.txt':
        print("------")
        print(filename)
        print(base_file)
        print(ext)
        os.rename(filename, base_file + '.jpg')
    elif ext == '.doc':
        print("------")
        os.rename(filename, base_file + '.mp3')
    elif ext == '.pdf':
        print("------")
        os.rename(filename, base_file + '.mp4')
    else:
        print("Not found")
To begin with, you can store your mappings in a dictionary. Then, while iterating over the folder, whenever you find one of those extensions, use the mapping to build the new file name. Also note that os.listdir() returns bare file names, so join them with the folder path (os.path.join) before calling os.rename().
import os

folder = "C:/Users/TestFolder"

# Dictionary for extension mappings
rename_dict = {'txt': 'jpg', 'doc': 'mp3', 'pdf': 'mp4'}

for filename in os.listdir(folder):
    # Get the extension and remove the . from it
    base_file, ext = os.path.splitext(filename)
    ext = ext.replace('.', '')

    # If you find an extension to rename
    if ext in rename_dict:
        # Create the new file name
        new_ext = rename_dict[ext]
        new_file = base_file + '.' + new_ext

        # Create the full old and new paths
        old_path = os.path.join(folder, filename)
        new_path = os.path.join(folder, new_file)

        # Rename the file
        os.rename(old_path, new_path)
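If you prefer pathlib, the same mapping idea can be sketched like this (a minimal sketch, assuming Python 3.4+ and the same folder as above; Path.with_suffix() swaps the extension and Path.rename() keeps the file in the same folder):

from pathlib import Path

folder = Path("C:/Users/TestFolder")
rename_dict = {".txt": ".jpg", ".doc": ".mp3", ".pdf": ".mp4"}

for path in folder.iterdir():
    new_suffix = rename_dict.get(path.suffix)
    if new_suffix:
        # with_suffix() builds the new name; rename() applies it in place
        path.rename(path.with_suffix(new_suffix))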
I would like to know whether it is possible, and how, to get in a single pass both the number of files in a directory and its subdirectories that match an fnmatch filter and the list of their full paths.
This is what I use for now:
def return_ext():
    file_pasok = ["AAAAA.txt", "BBBBBB.txt"]
    for i in range(len(file_pasok)):
        for ext_f in file_pasok:
            return ext_f

def list_files(file_path):
    ext = return_ext()
    for _, dirnames, filenames in os.walk(file_path):
        if not filenames:
            continue
        for file in fnmatch.filter(filenames, ext):
            file_found_str = Path(os.path.join(_, file))
            file_found = str(file_found_str)
            yield file_found

ext = return_ext()

######## GET HOW MANY FILES WERE FOUND
count_founded = sum([len(fnmatch.filter(files, ext)) for r, d, files in os.walk(file_path)])

######## GET THE LIST OF FOUND FILE PATHS
for file_found in list_files(file_path):
    print(file_found)
But of course the script performs the same search twice :(
Thanks so much for any suggestion!
import fnmatch
import os
from pathlib import Path

def return_ext():
    # Patterns to search for
    file_pasok = ["AAAA.txt", "BBBB.txt"]
    for ext_f in file_pasok:
        yield ext_f

def list_files(file_path):
    exts = list(return_ext())
    # Single os.walk pass: yield the full path of every file matching any pattern
    for root, dirnames, filenames in os.walk(file_path):
        if not filenames:
            continue
        for ext in exts:
            for file in fnmatch.filter(filenames, ext):
                yield str(Path(os.path.join(root, file)))

converted_list = list_files(file_path)
count_found = list(converted_list)  # materialise the generator once
print(len(count_found))             # how many files were found
for file_found in count_found:      # their full paths
    print(file_found)
I'm trying to save the user's input to a file, either creating a new one or overwriting an existing one with the same name, but I get the error "No such file or directory".
user_save is supposed to be an input containing the sequence of folders in the path where the user wants to save the file.
file_name is the name of the file to save to.
import os

user_input = input('Enter a string: ')
user_save = input('Where do you want to save the document? Enter a sequence of folders (separated by spaces)\n')
user_save = 'C\\' + user_save.replace(' ', '\\')
file_name = input('Enter a file name: ')
file_name = file_name + '.txt'
if os.path.exists(os.path.join(user_save, file_name)):
    overwrite = input('Do you really want to overwrite the file?').lower()
    if overwrite == 'no':
        print('The program was not saved')
    elif overwrite == 'yes':
        with open(os.path.join(user_save, file_name), 'w') as file:
            file.write(user_input)
            file.close()
        print('The file was successfully overwritten!')
else:
    with open(os.path.join(user_save, file_name)) as file:
        file.write(user_input)
        file.close()
    print('The file was successfully overwritten!')
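No answer is posted here, but judging from the code above the most likely culprits are the drive prefix ('C\\' is missing the colon, it should be 'C:\\') and the fact that the target folder may not exist yet. A minimal sketch of a possible fix, keeping the same variables as the question (the overwrite confirmation is left out for brevity):

import os

user_input = input('Enter a string: ')
user_save = input('Where do you want to save the document? Enter a sequence of folders (separated by spaces)\n')
user_save = os.path.join('C:\\', *user_save.split())  # build a proper Windows path
os.makedirs(user_save, exist_ok=True)                 # create the folders if they do not exist yet

file_name = input('Enter a file name: ') + '.txt'
with open(os.path.join(user_save, file_name), 'w') as file:  # 'w' creates or overwrites the file
    file.write(user_input)
print('File saved successfully!')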
I have written some Python code that creates a zip file, but it writes the archive to the location where the Python script lives rather than to the folder I need it written to. How do I structure my code so that it writes to the location I need?
def get_all_file_paths(directory):
    file_paths = []
    for root, directories, files in os.walk(directory):
        for filename in files:
            filepath = os.path.join(root, filename)
            file_paths.append(filepath)
    return file_paths

for root, subdirectories, files in os.walk(src):
    if root != src + 'Errors':
        for subdirectory in subdirectories:
            if subdirectory != 'A' and subdirectory != 'B' and subdirectory != 'C':
                print(subdirectory)
                folderName = subdirectory
                print('The folder name is', folderName)
                print(os.path.join(root, subdirectory))
                filePath = os.path.join(root, subdirectory)
                file_paths = get_all_file_paths(filePath)
                print('Following files will be zipped: ')
                for file_name in file_paths:
                    print(file_name)
                with ZipFile(folderName + '.zip', 'w') as zip:
                    for file in file_paths:
                        zip.write(file, os.path.relpath(file, root))
                zip.close()
                print('All files zipped successfully!')
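There is no answer posted here, but ZipFile writes wherever the path you give it points, so the usual fix is to build the archive name against the destination folder rather than passing a bare file name. A small sketch (dest_dir is a hypothetical name for the output folder; folderName, file_paths and root are the variables from the code above):

import os
from zipfile import ZipFile

dest_dir = r'C:\path\to\output'  # hypothetical destination folder

zip_path = os.path.join(dest_dir, folderName + '.zip')
with ZipFile(zip_path, 'w') as zip_file:
    for file in file_paths:
        zip_file.write(file, os.path.relpath(file, root))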
I'm trying to iterate through a directory and select the first file available.
These files look like this:
img_1.png img_2.png img_3.mp4 img_4.png img_5.jpg img_6.mp4
As you can see, their names are consistent but their extensions differ. I'd like the script to try each extension for a given number before it moves on to the next, i.e.:
I assume the best way to go about it is to check each name with each extension, like img_1.png, img_1.jpg and img_1.mp4, and if none of the three is available, move to the next number and repeat with img_2.png, img_2.jpg and img_2.mp4, until one is available.
Question:
Is it best to iterate through the files and use glob to extend a file path with the extensions? Is there a better method?
This is what I thought would work, but it doesn't:
# Gets the number of files in the dir
list = os.listdir(folder_path)
number_files = len(list)

# Chooses a file from the dir
e = 0
for i in range(number_files):
    try:
        chosen_file = folder_path + "img_" + str(e)
        for ext in ('*.jpg', '*.png', '*.mp4'):
            full_path = chosen_file.extend(glob(join(chosen_file, ext)))
            print(full_path)
        # random_file = random.choice(os.listdir(folder_path))  # Chooses a random file
    except:
        e += 1
        print('Hit except')
Are there other files in the folder with different names that you do not want to select, or are all the files in the folder of interest? Is all that matters that they have those 3 extensions, or are the names important as well?
If you are only interested in files with those 3 extensions, then this code will work:
import os

folder_path = 'test\\'
extensions = ('.jpg', '.png', '.mp4')

for r, d, f in os.walk(folder_path):
    for file in f:
        # str.endswith() accepts a tuple of suffixes
        if file.endswith(extensions):
            # Join against the current root so files in subfolders get the right path
            full_path = os.path.join(r, file)
            print(full_path)
Given:
$ ls /tmp
img_1.png img_1.jpg img_2.png img_4.png img_5.jpg img_3.mp4 img_6.mp4
You can use pathlib and a more targeted glob:
from pathlib import Path

p = Path('/tmp')
for fn in (x for x in p.glob('img_[0-9].*')
           if x.suffix in ('.png', '.jpg', '.mp4')):
    print(fn)
Prints:
/tmp/img_1.png
/tmp/img_1.jpg
/tmp/img_2.png
/tmp/img_4.png
/tmp/img_5.jpg
/tmp/img_3.mp4
/tmp/img_6.mp4
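Note that glob() yields entries in arbitrary order, so if you want the lowest-numbered file that exists (the "first available" one from the question), you can sort the matches by the number in the stem. A small sketch building on the answer above:

from pathlib import Path

p = Path('/tmp')
candidates = sorted(
    (x for x in p.glob('img_[0-9].*') if x.suffix in ('.png', '.jpg', '.mp4')),
    key=lambda x: int(x.stem.split('_')[1]),  # sort by the number after "img_"
)
if candidates:
    print(candidates[0])  # e.g. /tmp/img_1.png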
Answer:
I decided not to use glob and did this instead:
i = 0
for i in range(number_files):
    try:
        chosen_file = folder_path + "img_" + str(i)
        jpg_file = chosen_file + ".jpg"
        png_file = chosen_file + ".png"
        mp4_file = chosen_file + ".mp4"
        if os.path.exists(png_file) == True:
            print('png true')
            print(png_file)
            break
        elif os.path.exists(jpg_file) == True:
            print('jpg true')
            print(jpg_file)
            break
        elif os.path.exists(mp4_file) == True:
            print('mp4 true')
            print(mp4_file)
            break
    except:
        i += 1
        print('false')
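The bare try/except here never actually triggers, since os.path.exists() simply returns False for a missing path. A slightly tighter variant of the same loop (folder_path and number_files as in the question), using for/else to stop at the first number that has a matching file:

import os

for i in range(number_files):
    base = os.path.join(folder_path, "img_" + str(i))
    for ext in (".png", ".jpg", ".mp4"):
        candidate = base + ext
        if os.path.exists(candidate):
            print(candidate)
            break
    else:
        continue  # no extension matched this number, try the next one
    break  # a file was found, stop searching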
Under Linux / bash, how can I obtain a plain-text representation of a directory and its contents? (Note that by "plain-text" here I mean "UTF-8".)
In other words, how could I "pack" or "archive" a directory (with contents - including binary files) as a plain text file - such that I could "unpack" it later, and obtain the same directory with its contents?
I was interested in this for a while, and I think I finally managed to cook up a script that works in both Python 2.7 and 3.4 -- however, I'd still like to know if there is something else that does the same. Here it is as a Gist (with some more comments):
https://gist.github.com/anonymous/1a68bf2c9134fd5312219c8f68713632
Otherwise, I'm posting a slightly abridged version here (below) for reference.
The usage is: to archive/pack into a .json text file:
python archdir2text-json.py -a /tmp > myarchdir.json
... and to unpack from the .json text file into the current (calling) directory:
python archdir2text-json.py -u myarchdir.json
Binary files are handled as base64.
Here is the script:
archdir2text-json.py
#!/usr/bin/env python
import pprint, inspect
import argparse
import os
import stat
import errno
import base64
import codecs
class SmartDescriptionFormatter(argparse.RawDescriptionHelpFormatter):
    def _fill_text(self, text, width, indent):
        if text.startswith('R|'):
            paragraphs = text[2:].splitlines()
            rebroken = [argparse._textwrap.wrap(tpar, width) for tpar in paragraphs]
            rebrokenstr = []
            for tlinearr in rebroken:
                if (len(tlinearr) == 0):
                    rebrokenstr.append("")
                else:
                    for tlinepiece in tlinearr:
                        rebrokenstr.append(tlinepiece)
            return '\n'.join(rebrokenstr)
        return argparse.RawDescriptionHelpFormatter._fill_text(self, text, width, indent)
textchars = bytearray({7,8,9,10,12,13,27} | set(range(0x20, 0x100)) - {0x7f})
is_binary_string = lambda bytes: bool(bytes.translate(None, textchars))
cwd = os.getcwd()
if os.name == 'nt':
    import win32api, win32con

def folder_is_hidden(p):
    if os.name == 'nt':
        attribute = win32api.GetFileAttributes(p)
        return attribute & (win32con.FILE_ATTRIBUTE_HIDDEN | win32con.FILE_ATTRIBUTE_SYSTEM)
    else:
        return os.path.basename(p).startswith('.')  # linux-osx
def path_hierarchy(path):
    hierarchy = {
        'type': 'folder',
        'name': os.path.basename(path),
        'path': path,
    }
    try:
        cleared_contents = [contents
                            for contents in os.listdir(path)
                            if not(
                                os.path.isdir(os.path.join(path, contents))
                                and
                                folder_is_hidden(os.path.join(path, contents))
                            )]
        hierarchy['children'] = [
            path_hierarchy(os.path.join(path, contents))
            for contents in cleared_contents
        ]
    except OSError as e:
        if e.errno == errno.ENOTDIR:
            hierarchy['type'] = 'file'
        else:
            hierarchy['type'] += " " + str(e)
    if hierarchy['type'] == 'file':
        isfifo = stat.S_ISFIFO(os.stat(hierarchy['path']).st_mode)
        if isfifo:
            ftype = "fifo"
        else:
            try:
                data = open(hierarchy['path'], 'rb').read()
                ftype = "bin" if is_binary_string(data) else "txt"
                if (ftype == "txt"):
                    hierarchy['content'] = data.decode("utf-8")
                else:
                    hierarchy['content'] = base64.b64encode(data).decode("utf-8")
            except Exception as e:
                ftype = str(e)
        hierarchy['ftype'] = ftype
    return hierarchy
def recurse_unpack(inobj, relpath=""):
    if (inobj['type'] == "folder"):
        rpname = relpath + inobj['name']
        sys.stderr.write("folder name: " + rpname + os.linesep)
        os.mkdir(rpname)
        for tchild in inobj['children']:
            recurse_unpack(tchild, relpath=relpath + inobj['name'] + os.sep)
    elif (inobj['type'] == "file"):
        rfname = relpath + inobj['name']
        sys.stderr.write("file name: " + rfname + os.linesep)
        if inobj['ftype'] == "txt":
            with codecs.open(rfname, "w", "utf-8") as text_file:
                text_file.write(inobj['content'])
        elif inobj['ftype'] == "bin":
            with open(rfname, "wb") as bin_file:
                bin_file.write(base64.b64decode(inobj['content']))
if __name__ == '__main__':
    import json
    import sys
    parser = argparse.ArgumentParser(formatter_class=SmartDescriptionFormatter, description="""R|Command-line App that packs/archives (and vice-versa) a directory to a plain-text .json file; should work w/ both Python 2.7 and 3.4
see full help text in https://gist.github.com/anonymous/1a68bf2c9134fd5312219c8f68713632""")
    parser.add_argument('input_paths', type=str, nargs='*', default=['.'],
                        help='Paths to files/directories to include in the archive; or path to .json archive file')
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-a', '--archive', action='store_true', help="Interpret input_paths as paths to files/directories, and archive them to a .json file (output to stdout)")
    group.add_argument('-u', '--unpack', action='store_true', help="Interpret input_paths as path to an archive .json file, and unpack it in the current directory")
    args = parser.parse_args()
    if (args.archive):
        valid_input_paths = []
        for p in args.input_paths:
            if os.path.isdir(p) or os.path.exists(p):
                valid_input_paths.append(p)
            else:
                sys.stderr.write("Ignoring invalid input path: " + p + os.linesep)
        sys.stderr.write("Encoding input path(s): " + str(valid_input_paths) + os.linesep)
        path_hier_arr = [path_hierarchy(vp) for vp in valid_input_paths]
        outjson = json.dumps(path_hier_arr, indent=2, sort_keys=True, separators=(',', ': '))
        print(outjson)
    elif (args.unpack):
        valid_input_paths = []
        for p in args.input_paths:
            if os.path.isdir(p) or os.path.exists(p):
                valid_input_paths.append(p)
            else:
                sys.stderr.write("Ignoring invalid input path: " + p + os.linesep)
        for vp in valid_input_paths:
            with open(vp) as data_file:
                data = json.load(data_file)
            for datachunk in data:
                recurse_unpack(datachunk)