Update from 5cf8de1
pelson committed Nov 6, 2023
1 parent 5451b79 commit cd75201
Showing 4 changed files with 15 additions and 13 deletions.
2 changes: 1 addition & 1 deletion simple_repository_browser/__main__.py
@@ -69,5 +69,5 @@ def main():


if __name__ == '__main__':
- logging.basicConfig(level=logging.INFO)
+ logging.basicConfig(level=logging.WARNING)
main()
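For context on the level change in this hunk, here is a minimal, standalone sketch (not part of the repository) of how logging.basicConfig(level=logging.WARNING) interacts with the module-level logging.info / logging.warning calls introduced elsewhere in this commit:

import logging

# Root logger at WARNING: info-level progress messages are suppressed by
# default, while warnings about problem packages still reach the console.
logging.basicConfig(level=logging.WARNING)

logging.info("Index iteration loop ...")        # not emitted
logging.warning("Problem handling package X")   # emitted

# Operators can opt back into verbose output without a code change:
logging.getLogger().setLevel(logging.INFO)
logging.info("Now visible again")               # emitted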
10 changes: 5 additions & 5 deletions simple_repository_browser/crawler.py
@@ -64,7 +64,7 @@ async def crawl_recursively(
while packages_for_reindexing - seen:
remaining_packages = packages_for_reindexing - seen
pkg_name = remaining_packages.pop()
- print(
+ logging.info(
f"Index iteration loop. Looking at {pkg_name}, with {len(remaining_packages)} remaining ({len(seen)} having been completed)",
)
seen.add(pkg_name)
@@ -93,7 +93,7 @@ async def crawl_recursively(
)
except InvalidRequirement as err:
# See https://discuss.python.org/t/pip-supporting-non-pep508-dependency-specifiers/23107.
print(f"Problem handling package {pkg_name}: {err}")
logging.warning(f"Problem handling package {pkg_name}: {err}")
continue

for dist in pkg_info.requires_dist:
@@ -126,13 +126,13 @@ async def refetch_hook(self) -> None:
for _, row in zip(range(100), s['rows']):
popular_projects.append(row['project'])
except Exception as err:
- print(f'Problem fetching popular projects ({err})')
+ logging.warning(f'Problem fetching popular projects ({err})')
pass

await self.crawl_recursively(packages_w_dist_info | set(popular_projects))

async def run_reindex_periodically(self) -> None:
print("Starting the reindexing loop")
logging.info("Starting the reindexing loop")
while True:
try:
await self.refetch_hook()
@@ -163,7 +163,7 @@ async def fetch_pkg_info(
return info_file, pkg_info

if force_recache:
- print('Recaching')
+ logging.info('Recaching')

fetch_projects.insert_if_missing(
self._projects_db,
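The InvalidRequirement handling in this file guards against non-PEP 508 dependency specifiers that still appear in older metadata (see the linked discuss.python.org thread). A minimal, standalone sketch of the same pattern, using illustrative specifiers rather than data from the crawler:

import logging

from packaging.requirements import InvalidRequirement, Requirement

# "pytz (>dev)" is the kind of legacy, non-PEP 508 specifier the linked thread discusses.
specifiers = ["requests>=2.0", "pytz (>dev)"]

for spec in specifiers:
    try:
        req = Requirement(spec)
    except InvalidRequirement as err:
        # Mirrors the crawler: record the problem and keep going instead of aborting.
        logging.warning(f"Problem handling specifier {spec!r}: {err}")
        continue
    print(req.name, req.specifier)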
11 changes: 6 additions & 5 deletions simple_repository_browser/fetch_projects.py
@@ -6,6 +6,7 @@
# or submit itself to any jurisdiction.

import datetime
+ import logging

from simple_repository import SimpleRepository

@@ -46,13 +47,13 @@ def update_summary(conn, name: str, summary: str, release_date: datetime.datetim

async def fully_populate_db(connection, index: SimpleRepository):
con = connection
- print('Fetching names from index')
+ logging.info('Fetching names from index')

project_list = await index.get_project_list()
project_names = [
(project.normalized_name, project.name) for project in project_list.projects
]
- print('Inserting all new names (if any)')
+ logging.info('Inserting all new names (if any)')
with con as cursor:
for canonical_name, name in project_names:
cursor.execute(
@@ -66,12 +67,12 @@ async def fully_populate_db(connection, index: SimpleRepository):
index_canonical_names = {normed_name for normed_name, _ in project_names}

if not index_canonical_names:
print("No names found on the index. Not removing from the database, as this is likely a problem with the index.")
logging.warning("No names found on the index. Not removing from the database, as this is likely a problem with the index.")
return

names_in_db_no_longer_in_index = db_canonical_names - index_canonical_names
if names_in_db_no_longer_in_index:
- print(
+ logging.warning(
f'Removing the following { len(names_in_db_no_longer_in_index) } names from the database:\n '
"\n ".join(list(names_in_db_no_longer_in_index)[:2000]),
)
@@ -84,4 +85,4 @@ async def fully_populate_db(connection, index: SimpleRepository):
''',
(name,),
)
- print('DB synchronised with index')
+ logging.info('DB synchronised with index')
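A small, self-contained sketch of the synchronisation logic these log messages report on; the set names mirror the diff, but the sample values are invented for illustration:

import logging

# Canonical names currently stored locally vs. names just fetched from the index.
db_canonical_names = {"numpy", "scipy", "retired-project"}
index_canonical_names = {"numpy", "scipy", "new-project"}

if not index_canonical_names:
    # An empty index is treated as suspicious, never as "delete everything".
    logging.warning("No names found on the index. Not removing from the database.")
else:
    names_in_db_no_longer_in_index = db_canonical_names - index_canonical_names
    if names_in_db_no_longer_in_index:
        logging.warning(f"Removing {len(names_in_db_no_longer_in_index)} names: {sorted(names_in_db_no_longer_in_index)}")
        # ... a DELETE statement per stale name would run here ...
    logging.info("DB synchronised with index")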
5 changes: 3 additions & 2 deletions simple_repository_browser/view.py
@@ -11,6 +11,7 @@
import fastapi
import jinja2
from packaging.requirements import Requirement
+ from starlette.datastructures import URL

from . import model

@@ -26,14 +27,14 @@ def create_templates_environment(self) -> jinja2.Environment:
templates = jinja2.Environment(loader=loader, autoescape=True, undefined=jinja2.StrictUndefined)

@jinja2.pass_context
- def url_for(context: typing.Mapping[str, typing.Any], name: str, **path_params: typing.Any) -> str:
+ def url_for(context: typing.Mapping[str, typing.Any], name: str, **path_params: typing.Any) -> URL:
request: fastapi.Request = context["request"]
# We don't use request.url_for, as it always returns an absolute URL.
# This prohibits running behind a proxy which doesn't correctly set
# X-Forwarded-Proto / X-Forwarded-Prefix, such as the OpenShift ingress.
# See https://github.com/encode/starlette/issues/538#issuecomment-1135096753 for the
# proposed solution.
- return str(request.app.url_path_for(name, **path_params))
+ return URL(str(request.app.url_path_for(name, **path_params)))

def sizeof_fmt(num: float, suffix: str = "B"):
for unit in ("", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"):
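Returning a starlette.datastructures.URL instead of a plain string keeps the relative-path behaviour described in the comment while, reading the intent cautiously, giving templates access to Starlette's URL helpers. A standalone sketch of what the richer return type allows:

from starlette.datastructures import URL

# url_path_for() yields a relative path such as "/project/numpy";
# wrapping it in URL keeps it relative but adds URL-manipulation helpers.
url = URL("/project/numpy")

print(url.include_query_params(page=2))         # /project/numpy?page=2
print(url.replace_query_params(format="json"))  # /project/numpy?format=json
print(str(url))                                 # still usable wherever a plain string is expected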
