How to write a zipfile to another location - python-3.x

I have written some Python code that creates a zip file, but it writes the archive to the location where the Python script is located rather than the folder I need it written to. How do I structure my code so that it writes to the location I need?
import os
from zipfile import ZipFile

def get_all_file_paths(directory):
    file_paths = []
    for root, directories, files in os.walk(directory):
        for filename in files:
            filepath = os.path.join(root, filename)
            file_paths.append(filepath)
    return file_paths

for root, subdirectories, files in os.walk(src):
    if root != src + 'Errors':
        for subdirectory in subdirectories:
            if subdirectory != 'A' and subdirectory != 'B' and subdirectory != 'C':
                print(subdirectory)
                folderName = subdirectory
                print('The folder name is', folderName)
                print(os.path.join(root, subdirectory))
                filePath = os.path.join(root, subdirectory)
                file_paths = get_all_file_paths(filePath)
                print('Following files will be zipped: ')
                for file_name in file_paths:
                    print(file_name)
                with ZipFile(folderName + '.zip', 'w') as zip:
                    for file in file_paths:
                        zip.write(file, os.path.relpath(file, root))
                print('All files zipped successfully!')
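ZipFile(folderName + '.zip', 'w') creates the archive relative to the current working directory, which is usually the folder the script is run from. Prefixing the archive name with the destination folder fixes that. A minimal sketch of the change to the with-block inside the loop, assuming the destination folder is held in a variable named output_dir (not in the original code):

import os
from zipfile import ZipFile

output_dir = r'D:\path\to\destination'  # assumed destination folder, not in the original code

# build the archive path inside the destination folder instead of the working directory
archive_path = os.path.join(output_dir, folderName + '.zip')
with ZipFile(archive_path, 'w') as zip_out:
    for file in file_paths:
        zip_out.write(file, os.path.relpath(file, root))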

Related

How to get both the count of matching files and their paths at the same time?

I need to ask if it is possible, and how, to get at the same time both the count of files in a directory and its subdirectories (filtered with fnmatch) and the paths of those files.
For now I use this:
def return_ext():
    file_pasok = ["AAAAA.txt", "BBBBBB.txt"]
    for i in range(len(file_pasok)):
        for ext_f in file_pasok:
            return ext_f

def list_files(file_path):
    ext = return_ext()
    for _, dirnames, filenames in os.walk(file_path):
        if not filenames:
            continue
        for file in fnmatch.filter(filenames, ext):
            file_found_str = Path(os.path.join(_, file))
            file_found = str(file_found_str)
            yield file_found
ext = return_ext()
######## GET HOW MANY FILES WERE FOUND
count_founded = sum([len(fnmatch.filter(files, ext)) for r, d, files in os.walk(file_path)])
######## GET THE PATHS OF THE FILES FOUND
for file_found in list_files(file_path):
    print(file_found)
But of course this way the script runs the same search twice :(
Thanks so much for any suggestions!!
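One way to avoid walking the tree twice is to build the generator's results into a list once and then reuse that list for both the count and the printing, as in the following revision of the two functions: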
import os
import fnmatch
from pathlib import Path

def return_ext():
    file_pasok = ["AAAA.txt", "BBBB.txt"]
    for ext_f in file_pasok:
        yield ext_f

def list_files(file_path):
    # materialize the patterns once so they can be reused for every directory
    exts = list(return_ext())
    for root, dirnames, filenames in os.walk(file_path):
        if not filenames:
            continue
        for ext in exts:
            for file in fnmatch.filter(filenames, ext):
                yield str(Path(os.path.join(root, file)))

# walk the tree once: the list gives both the count and the paths
count_found = list(list_files(file_path))
print(len(count_found))
for file_found in count_found:
    print(file_found)

Reading shp files into geopandas and storing them in a dictionary when file names collide

I'm walking through a directory structure, finding all files with the .shp extension and storing them in a dictionary. However, some files are named the same; how do I store files of the same name in a dictionary without overwriting them? Appending the file structure to the name would be acceptable in this case. How is that done?
Current 'working' code:
import os
import geopandas as gpd

def get_all_shp(mydir):
    # layers = []
    data = {}
    for root, dirs, files in os.walk(mydir):
        for file in files:
            try:
                if file.endswith(".shp"):
                    shp = os.path.join(root, file)
                    # layers.append(shp)
                    path = root + "/" + file
                    # print("path: " + path)
                    data[file] = gpd.read_file(path)
            except:
                pass
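One fix is to key the dictionary by the full path rather than the bare file name, so two files with the same name in different folders no longer overwrite each other: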
def get_all_shp(mydir):
    # layers = []
    data = {}
    for root, dirs, files in os.walk(mydir):
        for file in files:
            try:
                if file.endswith(".shp"):
                    shp = os.path.join(root, file)
                    # layers.append(shp)
                    path = root + "/" + file
                    # print("path: " + path)
                    data[path] = gpd.read_file(path)
            except:
                pass
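If you would rather keep the file name visible in the key, as the question suggests, one option is to prefix it with the file's location relative to the starting directory. A minimal sketch (the relative-path key format is an assumption, not from the original post):

import os
import geopandas as gpd

def get_all_shp(mydir):
    data = {}
    for root, dirs, files in os.walk(mydir):
        for file in files:
            if file.endswith(".shp"):
                path = os.path.join(root, file)
                # build a key like "subdir_a/rivers.shp" so duplicates stay distinct
                key = os.path.relpath(path, mydir).replace(os.sep, "/")
                data[key] = gpd.read_file(path)
    return data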

Batch File Rename with Python

Below is my code to batch rename pictures inside a given directory
def multi_filename_change():
    i = 0
    files = askstring('Select your folder', 'Paste your directory path where your images are stored.')
    for file in os.listdir(files):
        if not file.startswith('.'):
            file_name = askstring('Add file name', 'Please enter a name for your files.')
            src = file
            dst = file_name + str(i) + ".jpg"
            os.rename(src, dst)
            i += 1
When this is run I get the below error message:
os.rename(src, dst) FileNotFoundError: [Errno 2] No such file or directory: '360007_space-wallpaper-4k.jpg' -> 'test0.jpg'
I cannot seem to solve this and it's probably an easy one for you experts :)
Thanks
The source path should include the existing directory, not just the filename:
src = files + file
or
src = os.path.join(files, file)
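Note that the destination has the same problem: if dst stays a bare filename, os.rename moves the renamed file into the current working directory rather than the source folder. A minimal corrected sketch of the loop, keeping the original variable names (the tkinter.simpledialog import is an assumption about where askstring comes from):

import os
from tkinter.simpledialog import askstring  # assumed source of askstring

def multi_filename_change():
    i = 0
    folder = askstring('Select your folder', 'Paste your directory path where your images are stored.')
    for file in os.listdir(folder):
        if not file.startswith('.'):
            file_name = askstring('Add file name', 'Please enter a name for your files.')
            src = os.path.join(folder, file)                        # full path to the existing file
            dst = os.path.join(folder, file_name + str(i) + ".jpg")  # full path for the renamed file
            os.rename(src, dst)
            i += 1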

How to change file extensions using Python

In a folder, I have 100 files with the extensions '.txt', '.doc', and '.pdf'. I need to rename the files as follows:
If the filename ends with '.txt', rename it so it ends with '.jpg'
If the filename ends with '.doc', rename it so it ends with '.mp3'
If the filename ends with '.pdf', rename it so it ends with '.mp4'
This is what I have tried so far:
import os, sys

folder = "C:/Users/TestFolder"
for filename in os.listdir(folder):
    base_file, ext = os.path.splitext(filename)
    print(ext)
    if ext == '.txt':
        print("------")
        print(filename)
        print(base_file)
        print(ext)
        os.rename(filename, base_file + '.jpg')
    elif ext == '.doc':
        print("------")
        os.rename(filename, base_file + '.mp3')
    elif ext == '.pdf':
        print("------")
        os.rename(filename, base_file + '.mp4')
    else:
        print("Not found")
To begin with, you can store your mappings in a dictionary; then, while iterating over your folder, when you find a matching extension, use the mapping to build the new file name and rename the file.
import os

folder = "C:/Users/TestFolder"

# Dictionary for extension mappings
rename_dict = {'txt': 'jpg', 'doc': 'mp3', 'pdf': 'mp4'}

for filename in os.listdir(folder):
    # Get the extension and remove the leading . from it
    base_file, ext = os.path.splitext(filename)
    ext = ext.replace('.', '')
    # If the extension is one we want to rename
    if ext in rename_dict:
        # Create the new file name
        new_ext = rename_dict[ext]
        new_file = base_file + '.' + new_ext
        # Create the full old and new paths
        old_path = os.path.join(folder, filename)
        new_path = os.path.join(folder, new_file)
        # Rename the file
        os.rename(old_path, new_path)
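For comparison, the same mapping can be expressed with pathlib, which handles the extension and the path joining for you. This is a sketch of an alternative, not part of the original answer; it keeps the leading dot in the mapping keys:

from pathlib import Path

folder = Path("C:/Users/TestFolder")
rename_dict = {'.txt': '.jpg', '.doc': '.mp3', '.pdf': '.mp4'}

for path in folder.iterdir():
    new_suffix = rename_dict.get(path.suffix)
    if new_suffix:
        # with_suffix builds the new name; rename works on full paths
        path.rename(path.with_suffix(new_suffix))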

How to open and append nested zip archives into dataframe without extracting?

I am trying to open a large number of csv files found inside several layers of zip files. Given the nature of this project, I am trying to open each one, read_csv it into a dataframe, append that data to an aggregate dataframe, and then continue through the loop.
Example: Folder Directory/First Zip/Second Zip/Third Zip/csv file.csv
My existing code can loop through the contents of the second and third zip files and get the name of each csv file. I am aware this code could probably be simplified by using glob, but I'm unfamiliar with it.
import os
import pandas as pd
import zipfile, re, io

directory = 'C:/Test/'
os.chdir(directory)
fname = "test" + ".zip"

with zipfile.ZipFile(fname, 'r') as zfile:
    # second level of zip files
    for zipname in zfile.namelist():
        if re.search(r'\.zip$', zipname) != None:
            zfiledata = io.BytesIO(zfile.read(zipname))
            # third level of zip files
            with zipfile.ZipFile(zfiledata) as zfile2:
                for zipname2 in zfile2.namelist():
                    # this zipfile contains xml and csv contents. This filters out the xmls
                    if zipname2.find("csv") > 0:
                        zfiledata2 = io.BytesIO(zfile2.read(zipname2))
                        with zipfile.ZipFile(zfiledata2) as zfile3:
                            fullpath = directory + fname + "/" + zipname + "/" + zipname2 + "/"
                            # csv file names are always the same as their zips. this cleans the string.
                            csvf = zipname2.replace('_csv.zip', ".csv")
                            filehandle = open(fullpath, 'rb')
                            # the above statement is erroring: FileNotFoundError: [Errno 2] No such file or directory:
                            zfilehandle = zipfile.ZipFile(filehandle)
                            data = []
                            csvdata = StringIO.StringIO(zfilehandle.read(csvf))
                            df = pd.read_csv(csvdata)
                            data.append(df)
                            print(data.head())
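For what it's worth, the FileNotFoundError is expected: a path that mixes zip archive names into a filesystem path cannot be opened with open(), because nothing was ever extracted to disk. Since zfile3 is already the innermost archive held in memory, the csv can be read from it directly. A minimal sketch of that inner step, keeping the surrounding loops as they are (the dfs accumulator name is an assumption):

# assumed: dfs = [] is created once, before the outer with/for loops
with zipfile.ZipFile(zfiledata2) as zfile3:
    csvf = zipname2.replace('_csv.zip', '.csv')
    # read the csv bytes straight out of the in-memory zip; nothing touches disk
    with zfile3.open(csvf) as csv_handle:
        dfs.append(pd.read_csv(csv_handle))

# after all the loops finish, combine everything into one aggregate dataframe
aggregate = pd.concat(dfs, ignore_index=True)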
