support pixabay
This commit is contained in:
@@ -14,33 +14,33 @@ from app.utils import utils
|
||||
# Global counter used by get_api_key() to round-robin across configured API keys.
requested_count = 0
def get_api_key(cfg_key: str):
    """Return an API key configured under *cfg_key*.

    A plain-string config value is returned as-is; a list of keys is
    rotated round-robin via the module-level ``requested_count`` counter.
    Raises ValueError when no key is configured at all.
    """
    keys = config.app.get(cfg_key)
    if not keys:
        raise ValueError(
            f"\n\n##### {cfg_key} is not set #####\n\nPlease set it in the config.toml file: {config.config_file}\n\n"
            f"{utils.to_json(config.app)}")

    # if only one key is provided, return it
    if isinstance(keys, str):
        return keys

    # Rotate through the list so successive calls spread load across keys.
    global requested_count
    requested_count += 1
    return keys[requested_count % len(keys)]
def search_videos(search_term: str,
|
||||
minimum_duration: int,
|
||||
video_aspect: VideoAspect = VideoAspect.portrait,
|
||||
) -> List[MaterialInfo]:
|
||||
def search_videos_pexels(search_term: str,
|
||||
minimum_duration: int,
|
||||
video_aspect: VideoAspect = VideoAspect.portrait,
|
||||
) -> List[MaterialInfo]:
|
||||
aspect = VideoAspect(video_aspect)
|
||||
video_orientation = aspect.name
|
||||
video_width, video_height = aspect.to_resolution()
|
||||
|
||||
api_key = get_api_key("pexels_api_keys")
|
||||
headers = {
|
||||
"Authorization": round_robin_api_key()
|
||||
"Authorization": api_key
|
||||
}
|
||||
proxies = config.pexels.get("proxies", None)
|
||||
# Build URL
|
||||
params = {
|
||||
"query": search_term,
|
||||
@@ -48,10 +48,10 @@ def search_videos(search_term: str,
|
||||
"orientation": video_orientation
|
||||
}
|
||||
query_url = f"https://api.pexels.com/videos/search?{urlencode(params)}"
|
||||
logger.info(f"searching videos: {query_url}, with proxies: {proxies}")
|
||||
logger.info(f"searching videos: {query_url}, with proxies: {config.proxy}")
|
||||
|
||||
try:
|
||||
r = requests.get(query_url, headers=headers, proxies=proxies, verify=False, timeout=(30, 60))
|
||||
r = requests.get(query_url, headers=headers, proxies=config.proxy, verify=False, timeout=(30, 60))
|
||||
response = r.json()
|
||||
video_items = []
|
||||
if "videos" not in response:
|
||||
@@ -83,6 +83,59 @@ def search_videos(search_term: str,
|
||||
return []
|
||||
|
||||
|
||||
def search_videos_pixabay(search_term: str,
                          minimum_duration: int,
                          video_aspect: VideoAspect = VideoAspect.portrait,
                          ) -> List[MaterialInfo]:
    """Search Pixabay for stock videos matching *search_term*.

    Keeps only videos at least *minimum_duration* seconds long that have a
    rendition at least as wide as the target resolution's width.

    Returns a (possibly empty) list of MaterialInfo items; network or
    parsing failures are logged and yield an empty list.
    """
    aspect = VideoAspect(video_aspect)
    # Pixabay's video API has no orientation filter, so only the target
    # width is used for client-side filtering; the height is not needed.
    video_width, _ = aspect.to_resolution()

    api_key = get_api_key("pixabay_api_keys")
    # Build URL
    params = {
        "q": search_term,
        "video_type": "all",  # Accepted values: "all", "film", "animation"
        "per_page": 50,
        "key": api_key
    }
    query_url = f"https://pixabay.com/api/videos/?{urlencode(params)}"
    logger.info(f"searching videos: {query_url}, with proxies: {config.proxy}")

    try:
        # NOTE(review): verify=False disables TLS certificate verification —
        # confirm this is intentional (it mirrors the pexels code path).
        r = requests.get(query_url, proxies=config.proxy, verify=False, timeout=(30, 60))
        response = r.json()
        video_items = []
        if "hits" not in response:
            logger.error(f"search videos failed: {response}")
            return video_items
        # loop through each video in the result
        for v in response["hits"]:
            duration = v["duration"]
            # check if video has desired minimum duration
            if duration < minimum_duration:
                continue
            video_files = v["videos"]
            # take the first rendition wide enough for the target resolution
            for video_type in video_files:
                video = video_files[video_type]
                if int(video["width"]) >= video_width:
                    item = MaterialInfo()
                    item.provider = "pixabay"
                    item.url = video["url"]
                    item.duration = duration
                    video_items.append(item)
                    break
        return video_items
    except Exception as e:
        logger.error(f"search videos failed: {str(e)}")

    return []
|
||||
|
||||
def save_video(video_url: str, save_dir: str = "") -> str:
|
||||
if not save_dir:
|
||||
save_dir = utils.storage_dir("cache_videos")
|
||||
@@ -101,9 +154,8 @@ def save_video(video_url: str, save_dir: str = "") -> str:
|
||||
return video_path
|
||||
|
||||
# if video does not exist, download it
|
||||
proxies = config.pexels.get("proxies", None)
|
||||
with open(video_path, "wb") as f:
|
||||
f.write(requests.get(video_url, proxies=proxies, verify=False, timeout=(60, 240)).content)
|
||||
f.write(requests.get(video_url, proxies=config.proxy, verify=False, timeout=(60, 240)).content)
|
||||
|
||||
if os.path.exists(video_path) and os.path.getsize(video_path) > 0:
|
||||
try:
|
||||
@@ -124,6 +176,7 @@ def save_video(video_url: str, save_dir: str = "") -> str:
|
||||
|
||||
def download_videos(task_id: str,
|
||||
search_terms: List[str],
|
||||
source: str = "pexels",
|
||||
video_aspect: VideoAspect = VideoAspect.portrait,
|
||||
video_contact_mode: VideoConcatMode = VideoConcatMode.random,
|
||||
audio_duration: float = 0.0,
|
||||
@@ -132,8 +185,11 @@ def download_videos(task_id: str,
|
||||
valid_video_items = []
|
||||
valid_video_urls = []
|
||||
found_duration = 0.0
|
||||
search_videos = search_videos_pexels
|
||||
if source == "pixabay":
|
||||
search_videos = search_videos_pixabay
|
||||
|
||||
for search_term in search_terms:
|
||||
# logger.info(f"searching videos for '{search_term}'")
|
||||
video_items = search_videos(search_term=search_term,
|
||||
minimum_duration=max_clip_duration,
|
||||
video_aspect=video_aspect)
|
||||
@@ -178,4 +234,4 @@ def download_videos(task_id: str,
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual smoke test: download pixabay videos for a sample search term.
    download_videos("test123", ["Money Exchange Medium"], audio_duration=100, source="pixabay")
||||
@@ -44,6 +44,11 @@ def start(task_id, params: VideoParams):
|
||||
else:
|
||||
logger.debug(f"video script: \n{video_script}")
|
||||
|
||||
if not video_script:
|
||||
sm.state.update_task(task_id, state=const.TASK_STATE_FAILED)
|
||||
logger.error("failed to generate video script.")
|
||||
return
|
||||
|
||||
sm.state.update_task(task_id, state=const.TASK_STATE_PROCESSING, progress=10)
|
||||
|
||||
logger.info("\n\n## generating video terms")
|
||||
@@ -60,6 +65,11 @@ def start(task_id, params: VideoParams):
|
||||
|
||||
logger.debug(f"video terms: {utils.to_json(video_terms)}")
|
||||
|
||||
if not video_terms:
|
||||
sm.state.update_task(task_id, state=const.TASK_STATE_FAILED)
|
||||
logger.error("failed to generate video terms.")
|
||||
return
|
||||
|
||||
script_file = path.join(utils.task_dir(task_id), f"script.json")
|
||||
script_data = {
|
||||
"script": video_script,
|
||||
@@ -128,9 +138,10 @@ def start(task_id, params: VideoParams):
|
||||
print(material_info)
|
||||
downloaded_videos.append(material_info.url)
|
||||
else:
|
||||
logger.info("\n\n## downloading videos")
|
||||
logger.info(f"\n\n## downloading videos from {params.video_source}")
|
||||
downloaded_videos = material.download_videos(task_id=task_id,
|
||||
search_terms=video_terms,
|
||||
source=params.video_source,
|
||||
video_aspect=params.video_aspect,
|
||||
video_contact_mode=params.video_concat_mode,
|
||||
audio_duration=audio_duration * params.video_count,
|
||||
|
||||
Reference in New Issue
Block a user