Compare commits
No commits in common. "e2dc9c8a8d55eaaa27ce91b92018cd4529586a92" and "f9faf4f8a4c5956732fb67844445b7be84af0130" have entirely different histories.
e2dc9c8a8d ... f9faf4f8a4
@@ -1,51 +0,0 @@
param(
    [Parameter(Mandatory=$true)]
    [string]$imageName
)

# Save the Docker image as a tarball
$imageTar = "$imageName.tar"
docker save -o $imageTar $imageName

# Create a temporary directory to extract layers
$tempDir = [System.IO.Path]::GetTempPath() + [System.Guid]::NewGuid().ToString()
New-Item -ItemType Directory -Force -Path $tempDir

# Extract the image tarball using 'tar' command
tar -xf $imageTar -C $tempDir

# Function to list files in a layer
function ListLayerFiles {
    param (
        [string]$layerPath
    )

    $layerTar = Get-ChildItem -Path $layerPath -Filter "*.tar" -File
    $layerDir = [System.IO.Path]::Combine($tempDir, [System.IO.Path]::GetRandomFileName())
    New-Item -ItemType Directory -Force -Path $layerDir

    # Extract the layer tarball using 'tar'
    tar -xf $layerTar.FullName -C $layerDir

    # List files in the layer
    Write-Host "Files in layer:"
    Get-ChildItem -Path $layerDir -Recurse
}

# List all layers and ask user to choose one
$layerDirs = Get-ChildItem -Path $tempDir -Directory
$layerDirs | ForEach-Object -Begin { $i = 0 } -Process {
    Write-Host "[$i]: $($_.Name)"
    $i++
}

$userChoice = Read-Host "Enter the index of the layer to list"
if ($userChoice -lt $layerDirs.Count -and $userChoice -ge 0) {
    ListLayerFiles -layerPath $layerDirs[$userChoice].FullName
} else {
    Write-Host "Invalid index selected."
}

# Cleanup
Remove-Item -Path $imageTar
Remove-Item -Path $tempDir -Recurse
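The script above walks the legacy `docker save` layout, in which the image tarball unpacks into one directory per layer, each holding a layer tarball. For comparison, a minimal Python sketch of the same inspection flow (illustrative only, not part of either commit; the image name is a placeholder, and the `*/layer.tar` pattern assumes the legacy layout rather than the newer OCI `blobs/sha256` layout):

import subprocess
import tarfile
import tempfile
from pathlib import Path

image = "alpine:latest"                      # placeholder image name
work = Path(tempfile.mkdtemp())
image_tar = work / "image.tar"

# Export the image and unpack the outer tarball
subprocess.run(["docker", "save", "-o", str(image_tar), image], check=True)
with tarfile.open(image_tar) as tf:
    tf.extractall(work)

# Legacy layout: each layer directory contains a layer.tar
for layer in sorted(work.glob("*/layer.tar")):
    print(f"Layer {layer.parent.name}:")
    with tarfile.open(layer) as lt:
        for name in lt.getnames()[:10]:      # show the first few entries
            print("   ", name)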
@@ -2,42 +2,60 @@
 import argparse
 
 
-def flatten_and_split(input_string):
-    elements = input_string.replace("\n", "").split(",")
-    flat_list = [item.strip() for element in elements for item in element.split("-")]
-    return ",".join(flat_list)
-
-
-def combine_and_uniquify(*inputs):
+def unique_combined_list(*inputs):
+    # Combine lists
     combined_list = [
         item.strip().title() for input_list in inputs for item in input_list.split(",")
     ]
-    unique_names = set(combined_list)
-
-    final_set = set()
-    for name in unique_names:
+    # Create an empty list to store the final unique names
+    final_list = []
+
+    # Check for reversed names
+    for name in combined_list:
         parts = name.split()
 
+        # If the name has two words, check for its reversed variant
         if len(parts) == 2:
             first, last = parts
             reversed_name = f"{last} {first}"
-            # Add the name if its reversed variant is not already in the final set
-            if reversed_name not in final_set:
-                final_set.add(name)
-        else:
-            final_set.add(name)
-
-    return ",".join(sorted(final_set))
+            # If neither the name nor its reversed variant is in the final list, add the name
+            if name not in final_list and reversed_name not in final_list:
+                final_list.append(name)
+        # If it's a single-word name, simply add it if it's not in the final list
+        else:
+            if name not in final_list:
+                final_list.append(name)
+
+    # Sort the list
+    sorted_list = sorted(final_list)
+
+    # Convert the list back to a comma-separated string
+    output = ",".join(sorted_list)
+
+    return output
 
 
 def main():
+    # Create an argument parser
     parser = argparse.ArgumentParser(
         description="Combine multiple comma-separated lists into one unique sorted list."
     )
+
+    # Add a variable number of input lists
     parser.add_argument("lists", nargs="+", type=str, help="Comma-separated lists.")
+
+    # Parse the arguments
     args = parser.parse_args()
 
-    processed_lists = [flatten_and_split(lst) for lst in args.lists]
-    result = combine_and_uniquify(*processed_lists)
+    # If only one list is provided, use it twice
+    if len(args.lists) == 1:
+        args.lists.append(args.lists[0])
+
+    # Get the unique combined list
+    result = unique_combined_list(*args.lists)
+
     print(result)
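As a quick illustration of the new behavior (names invented for the example, and assuming `unique_combined_list` from the revised script is in scope): a two-word name and its reversed variant are treated as the same entry, and only the variant seen first is kept.

result = unique_combined_list("john smith, jane doe", "smith john, alice brown")
print(result)
# Alice Brown,Jane Doe,John Smith   ("Smith John" is dropped as a reversal of "John Smith")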
@@ -1,43 +0,0 @@
import json
import argparse
from datetime import datetime, timezone


def process_json_file(filename):
    # Read JSON data from file
    with open(filename, "r") as file:
        data = json.load(file)

    # Process each game
    for game in data["response"]["games"]:
        # Convert playtime from minutes to hours
        game["playtime_forever"] /= 60

        # Convert Unix timestamp to readable date and time in UTC
        if game["rtime_last_played"] != 0:
            game["rtime_last_played"] = datetime.fromtimestamp(
                game["rtime_last_played"], timezone.utc
            ).strftime("%Y-%m-%d %H:%M:%S")
        else:
            game["rtime_last_played"] = "Not Played"

    # Return the modified data
    return data


def main():
    parser = argparse.ArgumentParser(
        description="Process a JSON file containing game data."
    )
    parser.add_argument("filename", help="JSON file to be processed")
    args = parser.parse_args()

    # Process the JSON file
    modified_data = process_json_file(args.filename)

    # Print the modified data
    print(json.dumps(modified_data, indent=4))


if __name__ == "__main__":
    main()
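The processing above implies the expected input shape: a top-level "response" object whose "games" array carries `playtime_forever` in minutes and `rtime_last_played` as a Unix timestamp (0 meaning never played). A minimal made-up input, just to show what `process_json_file` accepts (all values invented):

import json

sample = {
    "response": {
        "games": [
            {"appid": 620, "playtime_forever": 754, "rtime_last_played": 1700000000},
            {"appid": 400, "playtime_forever": 0, "rtime_last_played": 0},
        ]
    }
}
with open("games.json", "w") as f:
    json.dump(sample, f)

# Running the script on games.json would report roughly 12.57 hours for the first
# entry, "2023-11-14 22:13:20" as its last-played time, and "Not Played" for the second.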
ytdlqueue.py
@@ -1,50 +0,0 @@
#!/usr/bin/env python3

import subprocess
import sys
import time
from queue import Queue
from threading import Thread

# Queue to hold the URLs
download_queue = Queue()

def download_video(url):
    """
    Function to download a video using youtube-dl.
    """
    try:
        print(f"Downloading {url}...")
        subprocess.run(["yt-dlp", url], check=True)
        print(f"Finished downloading {url}")
    except subprocess.CalledProcessError as e:
        print(f"Failed to download {url}: {e}")

def worker():
    """
    Worker function to process items in the queue.
    """
    while True:
        url = download_queue.get()
        download_video(url)
        download_queue.task_done()

def main():
    # Start the worker thread
    thread = Thread(target=worker)
    thread.daemon = True
    thread.start()

    print("Enter URLs to download. Type 'exit' to quit.")
    while True:
        url = input("URL: ").strip()
        if url.lower() == 'exit':
            break
        download_queue.put(url)

    # Wait for all downloads to finish
    download_queue.join()

if __name__ == "__main__":
    main()
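ytdlqueue.py runs a single daemon worker, so queued URLs download one at a time. The same Queue/Thread pattern extends to a small pool if parallel downloads are wanted; a minimal sketch (not part of the original file; the worker count and URLs are arbitrary examples):

import subprocess
from queue import Queue
from threading import Thread

download_queue = Queue()

def worker():
    while True:
        url = download_queue.get()
        try:
            subprocess.run(["yt-dlp", url], check=True)
        except subprocess.CalledProcessError as e:
            print(f"Failed to download {url}: {e}")
        finally:
            download_queue.task_done()

# Start a few workers instead of one; 3 is an arbitrary choice.
for _ in range(3):
    Thread(target=worker, daemon=True).start()

for url in ["https://example.com/video1", "https://example.com/video2"]:
    download_queue.put(url)

download_queue.join()  # block until every queued URL has been handled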