From 768dc0d943bb2a5b255e8259c03cc371e38ac2b0 Mon Sep 17 00:00:00 2001
From: Clara Ribeiro
Date: Sun, 19 Jan 2025 13:17:29 -0300
Subject: [PATCH] fix

---
 .github/workflows/ci.yml      |  13 ++-
 .github/workflows/release.yml |  61 +++++------
 metrics/sonar-metrics.py      |  58 ++++++++++
 sonar_scripts/parser.py       | 193 ----------------------------------
 4 files changed, 93 insertions(+), 232 deletions(-)
 create mode 100644 metrics/sonar-metrics.py
 delete mode 100644 sonar_scripts/parser.py

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 0ff2bfac..fc67fc72 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -94,7 +94,8 @@ jobs:
 
   test:
     runs-on: ubuntu-latest
-    continue-on-error: true # Permite que o pipeline continue mesmo com falhas nos testes
+    # TO DO: adicionar variaveis de ambiente conforme necessario
+    # env:
     steps:
      - name: Checkout code
        uses: actions/checkout@v3
@@ -118,13 +119,15 @@
        run: npm i

      - name: 🧪 Roda suíte de testes
-        run: npm test || true # Ignora falhas dos testes
+        run: npm test

  sonarcloud:
+    # TO DO: adicionar variaveis de ambiente conforme necessario
+    # env:
    name: sonarcloud
    needs: [test]
    runs-on: ubuntu-latest
-    continue-on-error: true # Continua o pipeline mesmo com falhas nos testes
+    continue-on-error: true
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
@@ -148,10 +151,10 @@
        run: npx eslint -f json -o reports/eslint-report.json src || true

      - name: 🧪 Testes e cobertura
-        run: VITEST_REPORTER=vitest-sonar-reporter CI=true npm test -- --coverage || true # Adicionando || true para garantir que o erro não interrompa o fluxo
+        run: VITEST_REPORTER=vitest-sonar-reporter CI=true npm test -- --coverage

      - name: SonarCloud Scan
        uses: SonarSource/sonarcloud-github-action@master
        env:
          GITHUB_TOKEN: ${{ secrets.PERSONAL_TOKEN }}
-          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 81edc3b4..7754c40e 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,6 +1,6 @@
-name: Export de métricas
+name: front-release

-on: 
+on:
  push:
    branches: [master]
    tags:
@@ -9,46 +9,39 @@ on:
  pull_request:
    branches:
      - master
      - devel
-    types: [ closed ]
+    types: [closed]
+
 jobs:
-  release:
-    if: github.event.pull_request.merged == true && contains(github.event.pull_request.labels.*.name, 'NOT RELEASE') == false
-    runs-on: "ubuntu-latest"
-    environment: actions
-
+  generate-release:
+    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - name: Checkout code
+        uses: actions/checkout@v3
+
+      - name: 'Get Previous tag'
+        id: previoustag
+        uses: "WyriHaximus/github-action-get-previous-tag@v1"
        with:
-          fetch-depth: 0
-
-      - name: Install dotenv
-        run: pip install python-dotenv packaging pandas
-
-      - name: Cria arquivo .env
-        run: |
-          touch ./sonar_scripts/.env
-          echo GITHUB_TOKEN=${{secrets.PERSONAL_TOKEN}} >> ./sonar_scripts/.env
-          echo RELEASE_MAJOR=${{ contains(github.event.pull_request.labels.*.name, 'MAJOR RELEASE') }} >> ./sonar_scripts/.env
-          echo RELEASE_MINOR=${{ contains(github.event.pull_request.labels.*.name, 'MINOR RELEASE') }} >> ./sonar_scripts/.env
-          echo RELEASE_FIX=${{ contains(github.event.pull_request.labels.*.name, 'FIX RELEASE') }} >> ./sonar_scripts/.env
-          echo DEVELOP=${{ contains(github.event.pull_request.labels.*.name, 'DEVELOP') }} >> ./sonar_scripts/.env
-          echo REPO=${{ github.event.repository.name }} >> ./sonar_scripts/.env
+          fallback: 1.0.0

-      - name: Criar diretório
-        run: mkdir -p analytics-raw-data
+      - name: Use Node.js 16.x
+        uses: actions/setup-node@v3
+        with:
+          node-version: 16.x

-      - name: Coletar métricas no SonarCloud
-        run: python ./sonar_scripts/parser.py
+      - name: Cria métricas do SonarCloud
+        run: python metrics/sonar-metrics.py ${{ github.event.repository.name }} ${{ github.ref_name }}

-      - name: Envia métricas para repo de Doc
+      - name: Commita arquivos de métricas do SonarCloud
        run: |
-          git config --global user.email "${{secrets.GIT}}"
-          git config --global user.name "${{secrets.GIT}}"
-          git clone --single-branch --branch main "https://x-access-token:${{secrets.PERSONAL_TOKEN}}@github.com/fga-eps-mds/2024.2-SENTINELA-DOC" doc
+          git config --global user.email "${{ secrets.GIT_EMAIL }}"
+          git config --global user.name "${{ secrets.GIT_USER }}"
+          git clone --single-branch --branch main "https://x-access-token:${{ secrets.PERSONAL_TOKEN }}@github.com/fga-eps-mds/2024.2-SENTINELA-DOC" doc
          mkdir -p doc/analytics-raw-data
-          cp -R analytics-raw-data/*.json doc/analytics-raw-data
+          cp -R fga-eps-mds*.json doc/analytics-raw-data
          cd doc
          git add .
-          git commit -m "Adicionando métricas do repositório ${{ github.event.repository.name }} ${{ github.ref_name }}"
-          git push
\ No newline at end of file
+          git commit -m "Métricas SonarCloud - ${{ github.event.repository.name }} ${{ github.ref_name }}"
+          git push
+          echo "Arquivos de métricas gerados com sucesso."
\ No newline at end of file
diff --git a/metrics/sonar-metrics.py b/metrics/sonar-metrics.py
new file mode 100644
index 00000000..4c10d7bd
--- /dev/null
+++ b/metrics/sonar-metrics.py
@@ -0,0 +1,58 @@
+import json
+import sys
+import urllib.request
+from datetime import datetime
+
+def generate_metrics():
+    # url base do sonar, inicio da rota que devolverá as métricas
+    base_url = "https://sonarcloud.io/api/measures/component_tree?component="
+    # prefixo da disciplina, identificador da organização no sonarcloud
+    prefix = "fga-eps-mds"
+    # todas as métricas que serão requisitadas para o sonarcloud
+    metrics = [
+        "files",
+        "functions",
+        "complexity",
+        "comment_lines_density",
+        "duplicated_lines_density",
+        "coverage",
+        "ncloc",
+        "tests",
+        "test_errors",
+        "test_failures",
+        "test_execution_time",
+        "security_rating"
+    ]
+
+    # nome do repositório, vem como argumento no release.yml
+    repository_name = sys.argv[1]
+    # nome da branch onde foi chamado o script, vem de argumento no release.yml
+    ref_name = sys.argv[2]
+
+    # url montada
+    # base url = api do sonar
+    # prefix = id da org da disciplina
+    # repository_name = nome do repositorio (unido com prefix separado por _ é o identificador do projeto no sonar)
+    # o join do metrics une as métricas a serem solicitadas como parâmetros
+    # branch = especifica o nome da branch para pegar as métricas daquela branch em específico
+
+    # Verifica se a referência é uma branch ou uma tag
+    url = f'{base_url}{prefix}_{repository_name}&metricKeys={",".join(metrics)}&branch={ref_name}' if 'refs/heads/' in sys.argv[2] else f'{base_url}{prefix}_{repository_name}&metricKeys={",".join(metrics)}&tag={ref_name}'
+
+
+    with urllib.request.urlopen(url) as res:
+        data = json.load(res)
+        date = datetime.now()
+        date_padrao_hilmer = f"{date.month}-{date.day}-{date.year}-{date.hour}-{date.minute}-{date.second}"
+
+        underlined_repo_name = repository_name[:16] + \
+            repository_name[16:].replace('-', "_")
+
+        filename = f"{prefix}-{underlined_repo_name}-{date_padrao_hilmer}-{ref_name}.json"
+        print(filename)
+        with open(filename, "w") as file:
+            json.dump(data, file)
+
+
+if __name__ == "__main__":
== "__main__": + generate_metrics() \ No newline at end of file diff --git a/sonar_scripts/parser.py b/sonar_scripts/parser.py deleted file mode 100644 index a3b9211f..00000000 --- a/sonar_scripts/parser.py +++ /dev/null @@ -1,193 +0,0 @@ -import json -import requests -import sys -from datetime import datetime -import requests -# import datetime -import pandas as pd -import os -from packaging import version -from dotenv import load_dotenv - -###################################### -# DECLARAÇÃO DE CONSTANTES/VARIÁVEIS # -###################################### -TODAY = datetime.now() - -load_dotenv() -# Variáveis globais ao repositório -OWNER = "fga-eps-mds" -REPO = os.getenv('REPO') -REPO_ISSUES = os.getenv('REPO_DOC') - -# Configurar as variáveis de ambiente -GITHUB_TOKEN = os.getenv('GITHUB_TOKEN') -RELEASE_MAJOR = os.getenv('RELEASE_MAJOR') -RELEASE_MINOR = os.getenv('RELEASE_MINOR') -RELEASE_FIX = os.getenv('RELEASE_FIX') -DEVELOP = os.getenv('DEVELOP') - -METRICS_SONAR = [ - "files", - "functions", - "complexity", - "comment_lines_density", - "duplicated_lines_density", - "coverage", - "ncloc", - "tests", - "test_errors", - "test_failures", - "test_execution_time", - "security_rating", -] - -BASE_URL_SONAR = "https://sonarcloud.io/api/measures/component_tree?component=fga-eps-mds_" - -# Utilize a api que for necessária -# api_url_workflows = f"https://api.github.com/repos/{owner}/{repo}/actions/workflows" -# api_url_jobs = f"https://api.github.com/repos/{owner}/{repo}/actions/runs/3624383254/jobs" -# api_url_deployments = f"https://api.github.com/repos/{owner}/{repo}/deployments" -api_url_runs = f"https://api.github.com/repos/{OWNER}/{REPO}/actions/runs" -api_url_issues = f"https://api.github.com/repos/{OWNER}/{REPO_ISSUES}/issues" - -################### -# FUNÇÕES RELEASE # -################### -# Pega a última release -def get_latest_release(): - url = f'https://api.github.com/repos/{OWNER}/{REPO}/releases' - headers = { - 'Authorization': f'token {GITHUB_TOKEN}' - } - response = requests.get(url, headers=headers) - releases = response.json() - - if releases: - return releases[0].get('tag_name', '0.0.0') - return '0.0.0' - -# Cria um novo nome de tag -def new_tag_name(): - old_tag = get_latest_release() - try: - old_version = version.parse(old_tag) - except version.InvalidVersion: - old_version = version.parse('0.0.0') - - if RELEASE_MAJOR == 'true': - return f'{old_version.major + 1}.0.0' - elif RELEASE_MINOR == 'true': - return f'{old_version.major}.{old_version.minor + 1}.0' - elif RELEASE_FIX == 'true': - return f'{old_version.major}.{old_version.minor}.{old_version.micro + 1}' - else: - return f'{old_version.major}.{old_version.minor}.{old_version.micro + 1}' - -# Cria a nova release -def create_release(): - tag = new_tag_name() - url = f'https://api.github.com/repos/{OWNER}/{REPO}/releases' - headers = { - 'Authorization': f'token {GITHUB_TOKEN}', - 'Accept': 'application/vnd.github.v3+json' - } - payload = { - 'tag_name': tag, - 'name': tag - } - response = requests.post(url, headers=headers, json=payload) - res_data = response.json() - return res_data.get('upload_url'), tag - -################# -# FUNÇÕES SONAR # -################# - -def save_sonar_metrics(tag): - response = requests.get(f'{BASE_URL_SONAR}{REPO}&metricKeys={",".join(METRICS_SONAR)}&ps=500') - - j = json.loads(response.text) - - print("Extração do Sonar concluída.") - - file_path = f'./analytics-raw-data/fga-eps-mds-{REPO}-{TODAY.strftime("%m-%d-%Y-%H-%M-%S")}-{tag}.json' - - with open(file_path, 'w') as fp: 
- fp.write(json.dumps(j)) - fp.close() - - return - -################## -# FUNÇÕES GITHUB # -################## - -def all_request_pages(data): - total_runs = data["total_count"] - pages = (total_runs // 100) + (1 if total_runs % 100 > 0 else 0) - for i in range(pages+1): - if i == 0 or i == 1: - continue - api_url_now = api_url_runs + "?page=" + str(i) - response = requests.get(api_url_now) - for j in ((response.json()['workflow_runs'])): - data['workflow_runs'].append(j) - return data - -def filter_request_per_date(data, date): - data_filtered = [] - for i in data["workflow_runs"]: - if datetime.strptime(i["created_at"][:10],"%Y-%m-%d").strftime("%Y-%m-%d") == date: - data_filtered.append(i) - return {"workflow_runs": data_filtered} - -def save_github_metrics_runs(): - response = requests.get(api_url_runs, params={'per_page': 100,}) - - data = response.json() - - # date = datetime.strptime("2023-03-23","%Y-%m-%d").strftime("%Y-%m-%d") - data = all_request_pages(data) - - print("Quantidade de workflow_runs: " + str(len(data["workflow_runs"]))) - - file_path = f'./analytics-raw-data/GitHub_API-Runs-fga-eps-mds-{REPO}-{TODAY.strftime("%m-%d-%Y-%H-%M-%S")}.json' - - # Salva os dados em um json file - with open(file_path, 'w') as fp: - fp.write(json.dumps(data)) - fp.close() - - return - -def save_github_metrics_issues(): - issues = [] - page = 1 - - while True: - response = requests.get(api_url_issues, params={'state': 'all', 'per_page': 100, 'page': page}) - - page_issues = response.json() - if not page_issues: - break - - issues.extend(page_issues) - print(f"Página {page}: {len(page_issues)} issues carregadas.") - - page += 1 - - print("Quantidade total de issues: " + str(len(issues))) - - file_path = f'./analytics-raw-data/GitHub_API-Issues-fga-eps-mds-{REPO_ISSUES}.json' - - # Salvar todas as issues em um arquivo JSON - with open(file_path, 'w') as fp: - json.dump(issues, fp, indent=4) - -if __name__ == "__main__": - _, tag = create_release() - - save_sonar_metrics(tag) - save_github_metrics_runs() - save_github_metrics_issues() \ No newline at end of file