diff --git a/.cursorrules b/.cursorrules new file mode 100644 index 0000000000000000000000000000000000000000..c40b1fb10d8687aba8468993ffe5149beae6c89a --- /dev/null +++ b/.cursorrules @@ -0,0 +1 @@ +# Project Overview This project, named GPT-Researcher, is an LLM-based autonomous agent that conducts local and web research on any topic and generates a comprehensive report with citations. It is built using Next.js and TypeScript and integrates various libraries for their strengths. Your primary goal is to help with Next.js app router patterns, TypeScript type safety, Tailwind CSS best practices, code quality standards, and Python/FastAPI backend optimizations. # Key URLs - Project Home Page: https://gptr.dev/ - GitHub Repository: https://github.com/assafelovic/gpt-researcher - Documentation: https://docs.gptr.dev/ # Project Structure - Frontend user interface built with Next.js, TypeScript, and Tailwind CSS in `/frontend` - Static FastAPI version for lightweight deployments - Next.js version for production use with enhanced features - Multi-agent research system using LangChain and LangGraph in `/backend/multi_agents` - Browser, Editor, Researcher, Reviewer, Revisor, Writer, and Publisher agents - Task configuration and agent coordination - Document processing using Unstructured and PyMuPDF in `/backend/document_processing` - PDF, DOCX, and web content parsing - Text extraction and preprocessing - Report generation using LangChain and Jinja2 templates in `/backend/report_generation` - Template-based report structuring - Dynamic content formatting - Multiple output formats in `/backend/output_formats` - PDF via md2pdf - Markdown via mistune - DOCX via python-docx - Format conversion utilities - Export functionality - GPT Researcher core functionality in `/gpt_researcher` - Web scraping and content aggregation - Research planning and execution - Source validation and tracking - Query processing and response generation - Testing infrastructure in `/tests` - Unit tests for individual components - Integration tests for agent interactions - End-to-end research workflow tests - Mock data and fixtures for testing # Language Model Configuration - Default model: gpt-4-turbo - Alternative models: gpt-3.5-turbo, claude-3-opus - Temperature settings for different tasks - Context window management - Token limit handling - Cost optimization strategies # Error Handling - Research failure recovery - API rate limiting - Network timeout handling - Invalid input management - Source validation errors - Report generation failures # Performance - Parallel processing strategies - Caching mechanisms - Memory management - Response streaming - Resource allocation - Query optimization # Development Workflow - Branch naming conventions - Commit message format - PR review process - Testing requirements - Documentation updates - Version control guidelines # API Documentation - REST endpoints - WebSocket events - Request/Response formats - Authentication methods - Rate limits - Error codes # Monitoring - Performance metrics - Error tracking - Usage statistics - Cost monitoring - Research quality metrics - User feedback tracking # Frontend Components - Static FastAPI version for lightweight deployments - Next.js version for production use with enhanced features # Backend Components - Multi-agent system architecture - Document processing pipeline - Report generation system - Output format handlers # Core Research Components - Web scraping and aggregation - Research planning and execution - Source validation - Query processing # 
Testing - Unit tests - Integration tests - End-to-end tests - Performance testing # Rule Violation Monitoring - Alert developer when changes conflict with project structure - Warn about deviations from coding standards - Flag unauthorized framework or library additions - Monitor for security and performance anti-patterns - Track API usage patterns that may violate guidelines - Report TypeScript strict mode violations - Identify accessibility compliance issues # Development Guidelines - Use TypeScript with strict mode enabled - Follow ESLint and Prettier configurations - Ensure components are responsive and accessible - Use Tailwind CSS for styling, following the project's design system - Minimize AI-generated comments, prefer self-documenting code - Follow React best practices and hooks guidelines - Validate all user inputs and API responses - Use existing components as reference implementations # Important Scripts - `npm run dev`: Start development server - `npm run build`: Build for production - `npm run test`: Run test suite - `python -m pytest`: Run Python tests - `docker-compose up`: Start all services - `docker-compose run gpt-researcher-tests`: Run test suite in container - `python -m uvicorn backend.server.server:app --host=0.0.0.0 --port=8000`: Start FastAPI server - `python -m uvicorn backend.server.server:app --reload`: Start FastAPI server with auto-reload for development - `python main.py`: Run the main application directly # AI Integration Guidelines - Prioritize type safety in all AI interactions - Follow LangChain and LangGraph best practices - Implement proper error handling for AI responses - Maintain context window limits - Handle rate limiting and API quotas - Validate AI outputs before processing - Log AI interactions for debugging # Lexicon - **GPT Researcher**: Autonomous research agent system - **Multi-Agent System**: Coordinated AI agents for research tasks - **Research Pipeline**: End-to-end research workflow - **Agent Roles**: Browser, Editor, Researcher, Reviewer, Revisor, Writer, Publisher - **Source Validation**: Verification of research sources - **Report Generation**: Process of creating final research output # Additional Resources - [Next.js Documentation](https://nextjs.org/docs) - [TypeScript Handbook](https://www.typescriptlang.org/docs/) - [Tailwind CSS Documentation](https://tailwindcss.com/docs) - [LangChain Documentation](https://python.langchain.com/docs/) - [FastAPI Documentation](https://fastapi.tiangolo.com/) - [Project Documentation](https://docs.gptr.dev/) End all your comments with a :-) symbol. 
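# Example Usage (illustrative) - A minimal sketch of the documented pip-package flow for reference when touching backend code; the `GPTResearcher` API below follows the usage shown in the project README, and the query and report type are placeholders only:

```python
# Minimal, illustrative flow of the gpt-researcher pip package (see the README/docs for full options).
import asyncio

from gpt_researcher import GPTResearcher


async def main() -> None:
    researcher = GPTResearcher(query="Why is NVDA stock going up?", report_type="research_report")
    await researcher.conduct_research()        # gather, summarize, and track sources
    report = await researcher.write_report()   # generate the cited report
    print(report)


if __name__ == "__main__":
    asyncio.run(main())
```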
\ No newline at end of file diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..e8f991d7a2f6a2b81cef8500f3f98b6b2c8323c9 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,2 @@ +.git +output/ diff --git a/.env b/.env new file mode 100644 index 0000000000000000000000000000000000000000..fff6fe6a7013ba439536710f5134e404123280a2 --- /dev/null +++ b/.env @@ -0,0 +1,8 @@ +GOOGLE_API_KEY=AIzaSyCISHY92IzU60M8Jf0qCWIRCyhGUAj_haU +FAST_LLM="google_genai:gemini-1.5-flash" +SMART_LLM="google_genai:gemini-1.5-pro" +STRATEGIC_LLM="google_genai:gemini-1.5-pro" + +EMBEDDING="google_genai:models/text-embedding-004" + +TAVILY_API_KEY=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 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..3805f41a617a034db2ef4402c166ae0ef75a1f10 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,24 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +docs/blog/2023-09-22-gpt-researcher/architecture.png filter=lfs diff=lfs merge=lfs -text +docs/blog/2023-09-22-gpt-researcher/planner.jpeg filter=lfs diff=lfs merge=lfs -text +docs/blog/2024-05-19-gptr-langgraph/blog-langgraph.jpeg filter=lfs diff=lfs merge=lfs -text +docs/blog/2024-09-7-hybrid-research/gptr-hybrid.png filter=lfs diff=lfs merge=lfs -text +docs/docs/gpt-researcher/context/gptr-hybrid.png filter=lfs diff=lfs merge=lfs -text +docs/static/img/architecture.png filter=lfs diff=lfs merge=lfs -text +docs/static/img/leaderboard.png filter=lfs diff=lfs merge=lfs -text +frontend/nextjs/public/img/agents/academicResearchAgentAvatar.png filter=lfs diff=lfs merge=lfs -text +frontend/nextjs/public/img/agents/businessAnalystAgentAvatar.png filter=lfs diff=lfs merge=lfs -text +frontend/nextjs/public/img/agents/computerSecurityanalystAvatar.png filter=lfs diff=lfs merge=lfs -text +frontend/nextjs/public/img/agents/financeAgentAvatar.png filter=lfs diff=lfs merge=lfs -text +frontend/nextjs/public/img/agents/mathAgentAvatar.png filter=lfs diff=lfs merge=lfs -text +frontend/nextjs/public/img/agents/travelAgentAvatar.png filter=lfs diff=lfs merge=lfs -text +frontend/nextjs/public/img/gptr-logo.png filter=lfs diff=lfs merge=lfs -text +frontend/static/academicResearchAgentAvatar.png filter=lfs diff=lfs merge=lfs -text +frontend/static/businessAnalystAgentAvatar.png filter=lfs diff=lfs merge=lfs -text +frontend/static/computerSecurityanalystAvatar.png filter=lfs diff=lfs merge=lfs -text +frontend/static/financeAgentAvatar.png filter=lfs diff=lfs merge=lfs -text +frontend/static/mathAgentAvatar.png filter=lfs diff=lfs merge=lfs -text +frontend/static/travelAgentAvatar.png filter=lfs diff=lfs merge=lfs -text +tests/docs/doc.pdf filter=lfs diff=lfs merge=lfs -text diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000000000000000000000000000000000..6867cf8d2f6c61215f32a329b4b28831ac65d94a --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,38 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. 
See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS: [e.g. iOS] + - Browser [e.g. chrome, safari] + - Version [e.g. 22] + +**Smartphone (please complete the following information):** + - Device: [e.g. iPhone6] + - OS: [e.g. iOS8.1] + - Browser [e.g. stock browser, safari] + - Version [e.g. 22] + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000000000000000000000000000000000..72718d5aa63a292159351ae852c305fec1880a93 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000000000000000000000000000000000..9b9ce933c5319f03b1d0b1b91ab59ca2705787e9 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,15 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. 
+# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "pip" # See documentation for possible values + directory: "/" # Location of package manifests + schedule: + interval: "weekly" + - package-ecosystem: "docker" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml new file mode 100644 index 0000000000000000000000000000000000000000..9ac322d211ec437d1110ffddfe077b764c2f5f80 --- /dev/null +++ b/.github/workflows/docker-build.yml @@ -0,0 +1,45 @@ +name: GPTR tests +run-name: ${{ github.actor }} ran the GPTR tests flow +permissions: + contents: read + pull-requests: write +on: + workflow_dispatch: # Add this line to enable manual triggering + # pull_request: + # types: [opened, synchronize] + +jobs: + docker: + runs-on: ubuntu-latest + environment: tests # Specify the environment to use for this job + env: + # Ensure these environment variables are set for the entire job + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + TAVILY_API_KEY: ${{ secrets.TAVILY_API_KEY }} + LANGCHAIN_API_KEY: ${{ secrets.LANGCHAIN_API_KEY }} + steps: + - name: Git checkout + uses: actions/checkout@v3 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + with: + driver: docker + + # - name: Build Docker images + # uses: docker/build-push-action@v4 + # with: + # push: false + # tags: gptresearcher/gpt-researcher:latest + # file: Dockerfile + + - name: Set up Docker Compose + run: | + sudo curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose + sudo chmod +x /usr/local/bin/docker-compose + - name: Run tests with Docker Compose + run: | + docker-compose --profile test run --rm gpt-researcher-tests \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..3e12fc8bd05b4bbc840131282e175b845aa59787 --- /dev/null +++ b/.gitignore @@ -0,0 +1,53 @@ +#Ignore env containing secrets +.env +.venv +.envrc + +#Ignore Virtual Env +env/ +venv/ +.venv/ + +# Other Environments +ENV/ +env.bak/ +venv.bak/ + +#Ignore generated outputs +outputs/ +*.lock +dist/ +gpt_researcher.egg-info/ + +#Ignore my local docs +my-docs/ + +#Ignore pycache +**/__pycache__/ + +#Ignore mypy cache +.mypy_cache/ +node_modules +.idea +.DS_Store +.docusaurus +build +docs/build + +.vscode/launch.json +.langgraph-data/ +.next/ +package-lock.json + +#Vim swp files +*.swp + +# Log files +logs/ +*.orig +*.log +server_log.txt + +#Cursor Rules +.cursorrules +CURSOR_RULES.md \ No newline at end of file diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000000000000000000000000000000..a910a5ef95c183d29beac86da972d3d396af7658 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,123 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We, as members, contributors, and leaders, pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, sexual identity, or +orientation. 
+ +We commit to acting and interacting in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. + +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +- Demonstrating empathy and kindness toward others +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience +- Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +- The use of sexualized language or imagery, and sexual attention or + advances of any kind +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or email address, without their explicit permission +- Other conduct that could reasonably be considered inappropriate in a professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior deemed inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that do not +align with this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies to all community spaces and also applies when +an individual is officially representing the community in public spaces. +Examples include using an official email address, posting via an official +social media account, or acting as an appointed representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +[Assaf.elovic@gmail.com](mailto:Assaf.elovic@gmail.com). +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series +of actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period. This includes +avoiding interactions in community spaces and external channels like social media. +Violating these terms may lead to a temporary or permanent ban. + +### 3. 
Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any interaction or public +communication with the community for a specified period. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of groups of individuals. + +**Consequence**: A permanent ban from any public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..f63123e38edf69bc5773bdab226fe4710a6cfb37 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,42 @@ +# Contributing to GPT Researcher + +First off, we'd like to welcome you and thank you for your interest and effort in contributing to our open-source project ❤️. Contributions of all forms are welcome—from new features and bug fixes to documentation and more. + +We are on a mission to build the #1 AI agent for comprehensive, unbiased, and factual research online, and we need your support to achieve this grand vision. + +Please take a moment to review this document to make the contribution process easy and effective for everyone involved. + +## Reporting Issues + +If you come across any issue or have an idea for an improvement, don't hesitate to create an issue on GitHub. Describe your problem in sufficient detail, providing as much relevant information as possible. This way, we can reproduce the issue before attempting to fix it or respond appropriately. + +## Contributing Code + +1. **Fork the repository and create your branch from `master`.** + If it’s not an urgent bug fix, branch from `master` and work on the feature or fix there. + +2. **Make your changes.** + Implement your changes following best practices for coding in the project's language. + +3. **Test your changes.** + Ensure that your changes pass all tests if any exist. If the project doesn’t have automated tests, test your changes manually to confirm they behave as expected. + +4. **Follow the coding style.** + Ensure your code adheres to the coding conventions used throughout the project, including indentation, accurate comments, etc. + +5. **Commit your changes.** + Make your Git commits informative and concise. This is very helpful for others when they look at the Git log. + +6. **Push to your fork and submit a pull request.** + When your work is ready and passes tests, push your branch to your fork of the repository and submit a pull request from there. + +7. **Pat yourself on the back and wait for review.** + Your work is done, congratulations! 
Now sit tight. The project maintainers will review your submission as soon as possible. They might suggest changes or ask for improvements. Both constructive conversation and patience are key to the collaboration process. + +## Documentation + +If you would like to contribute to the project's documentation, please follow the same steps: fork the repository, make your changes, test them, and submit a pull request. + +Documentation is a vital part of any software. It's not just about having good code; ensuring that users and contributors understand what's going on, how to use the software, or how to contribute is crucial. + +We're grateful for all our contributors, and we look forward to building the world's leading AI research agent hand-in-hand with you. Let's harness the power of open source and AI to change the world together! diff --git a/CURSOR_RULES.md b/CURSOR_RULES.md new file mode 100644 index 0000000000000000000000000000000000000000..673567484a18c82f23e2c6787eeb493fa0913fd3 --- /dev/null +++ b/CURSOR_RULES.md @@ -0,0 +1,181 @@ +> **Note**: This is a readable copy of the `.cursorrules` file maintained for legibility. The actual rules are implemented from the `.cursorrules` file in the root directory. + +# GPT-Researcher Cursor Rules + +## Project Overview +This project, named GPT-Researcher, is an LLM-based autonomous agent that conducts local and web research on any topic and generates a comprehensive report with citations. It is built using Next.js and TypeScript, integrating various libraries for their strengths. + +Your primary goal is to help with: +- Next.js app router patterns +- TypeScript type safety +- Tailwind CSS best practices +- Code quality standards +- Python/FastAPI backend optimizations + +## Key URLs +- Project Home Page: https://gptr.dev/ +- GitHub Repository: https://github.com/assafelovic/gpt-researcher +- Documentation: https://docs.gptr.dev/ + +## Project Structure +- Frontend user interface built with Next.js, TypeScript, and Tailwind CSS in `/frontend` + - Static FastAPI version for lightweight deployments + - Next.js version for production use with enhanced features + +- Multi-agent research system using LangChain and LangGraph in `/backend/multi_agents` + - Browser, Editor, Researcher, Reviewer, Revisor, Writer, and Publisher agents + - Task configuration and agent coordination + +- Document processing using Unstructured and PyMuPDF in `/backend/document_processing` + - PDF, DOCX, and web content parsing + - Text extraction and preprocessing + +- Report generation using LangChain and Jinja2 templates in `/backend/report_generation` + - Template-based report structuring + - Dynamic content formatting + +- Multiple output formats in `/backend/output_formats` + - PDF via md2pdf + - Markdown via mistune + - DOCX via python-docx + - Format conversion utilities + - Export functionality + +- GPT Researcher core functionality in `/gpt_researcher` + - Web scraping and content aggregation + - Research planning and execution + - Source validation and tracking + - Query processing and response generation + +- Testing infrastructure in `/tests` + - Unit tests for individual components + - Integration tests for agent interactions + - End-to-end research workflow tests + - Mock data and fixtures for testing + +## Language Model Configuration +- Default model: gpt-4-turbo +- Alternative models: gpt-3.5-turbo, claude-3-opus +- Temperature settings for different tasks +- Context window management +- Token limit handling +- Cost optimization strategies + +## Error Handling 
+- Research failure recovery +- API rate limiting +- Network timeout handling +- Invalid input management +- Source validation errors +- Report generation failures + +## Performance +- Parallel processing strategies +- Caching mechanisms +- Memory management +- Response streaming +- Resource allocation +- Query optimization + +## Development Workflow +- Branch naming conventions +- Commit message format +- PR review process +- Testing requirements +- Documentation updates +- Version control guidelines + +## API Documentation +- REST endpoints +- WebSocket events +- Request/Response formats +- Authentication methods +- Rate limits +- Error codes + +## Monitoring +- Performance metrics +- Error tracking +- Usage statistics +- Cost monitoring +- Research quality metrics +- User feedback tracking + +## Frontend Components +- Static FastAPI version for lightweight deployments +- Next.js version for production use with enhanced features + +## Backend Components +- Multi-agent system architecture +- Document processing pipeline +- Report generation system +- Output format handlers + +## Core Research Components +- Web scraping and aggregation +- Research planning and execution +- Source validation +- Query processing + +## Testing +- Unit tests +- Integration tests +- End-to-end tests +- Performance testing + +## Rule Violation Monitoring +- Alert developer when changes conflict with project structure +- Warn about deviations from coding standards +- Flag unauthorized framework or library additions +- Monitor for security and performance anti-patterns +- Track API usage patterns that may violate guidelines +- Report TypeScript strict mode violations +- Identify accessibility compliance issues + +## Development Guidelines +- Use TypeScript with strict mode enabled +- Follow ESLint and Prettier configurations +- Ensure components are responsive and accessible +- Use Tailwind CSS for styling, following the project's design system +- Minimize AI-generated comments, prefer self-documenting code +- Follow React best practices and hooks guidelines +- Validate all user inputs and API responses +- Use existing components as reference implementations + +## Important Scripts +- `npm run dev`: Start development server +- `npm run build`: Build for production +- `npm run test`: Run test suite +- `python -m pytest`: Run Python tests +- `python -m uvicorn backend.server.server:app --host=0.0.0.0 --port=8000`: Start FastAPI server +- `python -m uvicorn backend.server.server:app --reload`: Start FastAPI server with auto-reload for development +- `python main.py`: Run the main application directly +- `docker-compose up`: Start all services +- `docker-compose run gpt-researcher-tests`: Run test suite in container + +## AI Integration Guidelines +- Prioritize type safety in all AI interactions +- Follow LangChain and LangGraph best practices +- Implement proper error handling for AI responses +- Maintain context window limits +- Handle rate limiting and API quotas +- Validate AI outputs before processing +- Log AI interactions for debugging + +## Lexicon +- **GPT Researcher**: Autonomous research agent system +- **Multi-Agent System**: Coordinated AI agents for research tasks +- **Research Pipeline**: End-to-end research workflow +- **Agent Roles**: Browser, Editor, Researcher, Reviewer, Revisor, Writer, Publisher +- **Source Validation**: Verification of research sources +- **Report Generation**: Process of creating final research output + +## Additional Resources +- [Next.js Documentation](https://nextjs.org/docs) 
+- [TypeScript Handbook](https://www.typescriptlang.org/docs/) +- [Tailwind CSS Documentation](https://tailwindcss.com/docs) +- [LangChain Documentation](https://python.langchain.com/docs/) +- [FastAPI Documentation](https://fastapi.tiangolo.com/) +- [Project Documentation](https://docs.gptr.dev/) + +_Note: End all your comments with a :-) symbol._ \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..637b185027a00fc2ed8bef793f05b531cf1da872 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,46 @@ +# Stage 1: Browser and build tools installation +FROM python:3.11.4-slim-bullseye AS install-browser + +# Install Chromium, Chromedriver, Firefox, Geckodriver, and build tools in one layer +RUN apt-get update && \ + apt-get satisfy -y "chromium, chromium-driver (>= 115.0)" && \ + apt-get install -y --no-install-recommends firefox-esr wget build-essential && \ + wget https://github.com/mozilla/geckodriver/releases/download/v0.33.0/geckodriver-v0.33.0-linux64.tar.gz && \ + tar -xvzf geckodriver-v0.33.0-linux64.tar.gz && \ + chmod +x geckodriver && \ + mv geckodriver /usr/local/bin/ && \ + rm geckodriver-v0.33.0-linux64.tar.gz && \ + chromium --version && chromedriver --version && \ + rm -rf /var/lib/apt/lists/* # Clean up apt lists to reduce image size + +# Stage 2: Python dependencies installation +FROM install-browser AS gpt-researcher-install + +ENV PIP_ROOT_USER_ACTION=ignore +WORKDIR /usr/src/app + +# Copy and install Python dependencies in a single layer to optimize cache usage +COPY ./requirements.txt ./requirements.txt +COPY ./multi_agents/requirements.txt ./multi_agents/requirements.txt + +RUN pip install --no-cache-dir -r requirements.txt && \ + pip install --no-cache-dir -r multi_agents/requirements.txt + +# Stage 3: Final stage with non-root user and app +FROM gpt-researcher-install AS gpt-researcher + +# Create a non-root user for security +RUN useradd -ms /bin/bash gpt-researcher && \ + chown -R gpt-researcher:gpt-researcher /usr/src/app + +USER gpt-researcher +WORKDIR /usr/src/app + +# Copy the rest of the application files with proper ownership +COPY --chown=gpt-researcher:gpt-researcher ./ ./ + +# Expose the application's port +EXPOSE 8000 + +# Define the default command to run the application +CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..29f81d812f3e768fa89638d1f72920dbfd1413a8 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/Procfile b/Procfile new file mode 100644 index 0000000000000000000000000000000000000000..b0c6683b009a4a144de9af5bcfd4b280789be431 --- /dev/null +++ b/Procfile @@ -0,0 +1 @@ +web: python -m uvicorn backend.server.server:app --host=0.0.0.0 --port=${PORT} \ No newline at end of file diff --git a/README-ja_JP.md b/README-ja_JP.md new file mode 100644 index 0000000000000000000000000000000000000000..e806861e0d5941f01176b8ff47110e16bfb6d3ef --- /dev/null +++ b/README-ja_JP.md @@ -0,0 +1,159 @@ +
+ +Logo + + +#### + +[![公式サイト](https://img.shields.io/badge/公式サイト-gptr.dev-blue?style=for-the-badge&logo=world&logoColor=white)](https://gptr.dev) +[![Documentation](https://img.shields.io/badge/Documentation-DOCS-f472b6?logo=googledocs&logoColor=white&style=for-the-badge)](https://docs.gptr.dev) +[![Discord Follow](https://img.shields.io/discord/1127851779011391548?style=for-the-badge&logo=discord&label=Chat%20on%20Discord)](https://discord.gg/QgZXvJAccX) + +[![PyPI version](https://img.shields.io/pypi/v/gpt-researcher?logo=pypi&logoColor=white&style=flat)](https://badge.fury.io/py/gpt-researcher) +![GitHub Release](https://img.shields.io/github/v/release/assafelovic/gpt-researcher?style=flat&logo=github) +[![Open In Colab](https://img.shields.io/static/v1?message=Open%20in%20Colab&logo=googlecolab&labelColor=grey&color=yellow&label=%20&style=flat&logoSize=40)](https://colab.research.google.com/github/assafelovic/gpt-researcher/blob/master/docs/docs/examples/pip-run.ipynb) +[![Docker Image Version](https://img.shields.io/docker/v/elestio/gpt-researcher/latest?arch=amd64&style=flat&logo=docker&logoColor=white&color=1D63ED)](https://hub.docker.com/r/gptresearcher/gpt-researcher) +[![Twitter Follow](https://img.shields.io/twitter/follow/assaf_elovic?style=social)](https://twitter.com/assaf_elovic) + +[English](README.md) | +[中文](README-zh_CN.md) | +[日本語](README-ja_JP.md) | +[한국어](README-ko_KR.md) +
+ +# 🔎 GPT Researcher + +**GPT Researcher は、さまざまなタスクに対する包括的なオンラインリサーチのために設計された自律エージェントです。** + +このエージェントは、詳細で事実に基づいた偏りのない研究レポートを生成することができ、関連するリソース、アウトライン、およびレッスンに焦点を当てるためのカスタマイズオプションを提供します。最近の [Plan-and-Solve](https://arxiv.org/abs/2305.04091) および [RAG](https://arxiv.org/abs/2005.11401) 論文に触発され、GPT Researcher は速度、決定論、および信頼性の問題に対処し、同期操作ではなく並列化されたエージェント作業を通じてより安定したパフォーマンスと高速化を提供します。 + +**私たちの使命は、AIの力を活用して、個人や組織に正確で偏りのない事実に基づいた情報を提供することです。** + +## なぜGPT Researcherなのか? + +- 手動の研究タスクで客観的な結論を形成するには時間がかかることがあり、適切なリソースと情報を見つけるのに数週間かかることもあります。 +- 現在のLLMは過去の情報に基づいて訓練されており、幻覚のリスクが高く、研究タスクにはほとんど役に立ちません。 +- 現在のLLMは短いトークン出力に制限されており、長く詳細な研究レポート(2,000語以上)には不十分です。 +- Web検索を可能にするサービス(ChatGPT + Webプラグインなど)は、限られたリソースとコンテンツのみを考慮し、場合によっては表面的で偏った回答をもたらします。 +- Webソースの選択のみを使用すると、研究タスクの正しい結論を導く際にバイアスが生じる可能性があります。 + +## アーキテクチャ +主なアイデアは、「プランナー」と「実行」エージェントを実行することであり、プランナーは研究する質問を生成し、実行エージェントは生成された各研究質問に基づいて最も関連性の高い情報を探します。最後に、プランナーはすべての関連情報をフィルタリングおよび集約し、研究レポートを作成します。

+エージェントは、研究タスクを完了するために gpt-4o-mini と gpt-4o(128K コンテキスト)の両方を活用します。必要に応じてそれぞれを使用することでコストを最適化します。**平均的な研究タスクは完了するのに約3分かかり、コストは約0.1ドルです**。 + +
+ +
+ + +詳細説明: +* 研究クエリまたはタスクに基づいて特定のドメインエージェントを作成します。 +* 研究タスクに対する客観的な意見を形成する一連の研究質問を生成します。 +* 各研究質問に対して、与えられたタスクに関連する情報をオンラインリソースから収集するクローラーエージェントをトリガーします。 +* 各収集されたリソースについて、関連情報に基づいて要約し、そのソースを追跡します。 +* 最後に、すべての要約されたソースをフィルタリングおよび集約し、最終的な研究レポートを生成します。 + +## デモ +https://github.com/assafelovic/gpt-researcher/assets/13554167/a00c89a6-a295-4dd0-b58d-098a31c40fda + +## チュートリアル + - [動作原理](https://docs.gptr.dev/blog/building-gpt-researcher) + - [インストール方法](https://www.loom.com/share/04ebffb6ed2a4520a27c3e3addcdde20?sid=da1848e8-b1f1-42d1-93c3-5b0b9c3b24ea) + - [ライブデモ](https://www.loom.com/share/6a3385db4e8747a1913dd85a7834846f?sid=a740fd5b-2aa3-457e-8fb7-86976f59f9b8) + +## 特徴 +- 📝 研究、アウトライン、リソース、レッスンレポートを生成 +- 🌐 各研究で20以上のWebソースを集約し、客観的で事実に基づいた結論を形成 +- 🖥️ 使いやすいWebインターフェース(HTML/CSS/JS)を含む +- 🔍 JavaScriptサポート付きのWebソースをスクレイピング +- 📂 訪問および使用されたWebソースのコンテキストを追跡 +- 📄 研究レポートをPDF、Wordなどにエクスポート + +## 📖 ドキュメント + +完全なドキュメントについては、[こちら](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started)を参照してください: + +- 入門(インストール、環境設定、簡単な例) +- 操作例(デモ、統合、dockerサポート) +- 参考資料(API完全ドキュメント) +- Tavilyアプリケーションインターフェースの統合(コア概念の高度な説明) + +## クイックスタート +> **ステップ 0** - Python 3.11 以降をインストールします。[こちら](https://www.tutorialsteacher.com/python/install-python)を参照して、ステップバイステップのガイドを確認してください。 + +
+ +> **ステップ 1** - プロジェクトをダウンロードします + +```bash +$ git clone https://github.com/assafelovic/gpt-researcher.git +$ cd gpt-researcher +``` + +
+ +> **ステップ2** - 依存関係をインストールします +```bash +$ pip install -r requirements.txt +``` +
+ +> **ステップ 3** - OpenAI キーと Tavily API キーを使用して .env ファイルを作成するか、直接エクスポートします + +```bash +$ export OPENAI_API_KEY={Your OpenAI API Key here} +``` +```bash +$ export TAVILY_API_KEY={Your Tavily API Key here} +``` + +- **LLMには、[OpenAI GPT](https://platform.openai.com/docs/guides/gpt) を使用することをお勧めします**が、[Langchain Adapter](https://python.langchain.com/docs/guides/adapters/openai) がサポートする他の LLM モデル(オープンソースを含む)を使用することもできます。llm モデルとプロバイダーを config/config.py で変更するだけです。[このガイド](https://python.langchain.com/docs/integrations/llms/) に従って、LLM を Langchain と統合する方法を学んでください。 +- **検索エンジンには、[Tavily Search API](https://app.tavily.com)(LLM 用に最適化されています)を使用することをお勧めします**が、他の検索エンジンを選択することもできます。config/config.py で検索プロバイダーを「duckduckgo」、「googleAPI」、「googleSerp」、「searchapi」、「searx」に変更するだけです。次に、config.py ファイルに対応する env API キーを追加します。 +- **最適なパフォーマンスを得るために、[OpenAI GPT](https://platform.openai.com/docs/guides/gpt) モデルと [Tavily Search API](https://app.tavily.com) を使用することを強くお勧めします。** +
+ +> **ステップ 4** - FastAPI を使用してエージェントを実行します + +```bash +$ uvicorn main:app --reload +``` +
+ +> **ステップ 5** - 任意のブラウザで http://localhost:8000 にアクセスして、リサーチを楽しんでください! + +Docker の使い方や機能とサービスの詳細については、[ドキュメント](https://docs.gptr.dev) ページをご覧ください。 + +## 🚀 貢献 +私たちは貢献を大歓迎します!興味がある場合は、[貢献](CONTRIBUTING.md) をご覧ください。 + +私たちの[ロードマップ](https://trello.com/b/3O7KBePw/gpt-researcher-roadmap) ページを確認し、私たちの使命に参加することに興味がある場合は、[Discord コミュニティ](https://discord.gg/QgZXvJAccX) を通じてお問い合わせください。 + +## ✉️ サポート / お問い合わせ +- [コミュニティディスカッション](https://discord.gg/spBgZmm3Xe) +- 私たちのメール: support@tavily.com + +## 🛡 免責事項 + +このプロジェクト「GPT Researcher」は実験的なアプリケーションであり、明示または黙示のいかなる保証もなく「現状のまま」提供されます。私たちは学術目的のためにMITライセンスの下でコードを共有しています。ここに記載されている内容は学術的なアドバイスではなく、学術論文や研究論文での使用を推奨するものではありません。 + +私たちの客観的な研究主張に対する見解: +1. 私たちのスクレイピングシステムの主な目的は、不正確な事実を減らすことです。どうやって解決するのか?私たちがスクレイピングするサイトが多ければ多いほど、誤ったデータの可能性は低くなります。各研究で20の情報を収集し、それらがすべて間違っている可能性は非常に低いです。 +2. 私たちの目標はバイアスを排除することではなく、可能な限りバイアスを減らすことです。**私たちはここでコミュニティとして最も効果的な人間と機械の相互作用を探求しています**。 +3. 研究プロセスでは、人々も自分が研究しているトピックに対してすでに意見を持っているため、バイアスがかかりやすいです。このツールは多くの意見を収集し、偏った人が決して読まないであろう多様な見解を均等に説明します。 + +**GPT-4 言語モデルの使用は、トークンの使用により高額な費用がかかる可能性があることに注意してください**。このプロジェクトを利用することで、トークンの使用状況と関連する費用を監視および管理する責任があることを認めたことになります。OpenAI API の使用状況を定期的に確認し、予期しない料金が発生しないように必要な制限やアラートを設定することを強くお勧めします。 + +--- + +
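補足として、サーバーを立てずにライブラリとして利用する場合の最小例を示します(英語版・韓国語版READMEに記載の `gpt-researcher` PIP パッケージの API に基づくスケッチであり、クエリやレポート種別は例示にすぎません。詳細は公式ドキュメントを参照してください):

```python
# pip install gpt-researcher を実行した上で利用する最小スケッチ(クエリは例示)
import asyncio

from gpt_researcher import GPTResearcher


async def main() -> None:
    researcher = GPTResearcher(query="なぜNvidia株は上昇しているのか?", report_type="research_report")
    await researcher.conduct_research()        # リサーチを実行し、ソースを収集
    report = await researcher.write_report()   # 引用付きレポートを生成
    print(report)


asyncio.run(main())
```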

+ + + + + Star History Chart + + +

diff --git a/README-ko_KR.md b/README-ko_KR.md new file mode 100644 index 0000000000000000000000000000000000000000..e8adfc52669dd4154da1dce4c686c28ec6fff619 --- /dev/null +++ b/README-ko_KR.md @@ -0,0 +1,242 @@ +
+ +Logo + + +#### + +[![Website](https://img.shields.io/badge/Official%20Website-gptr.dev-teal?style=for-the-badge&logo=world&logoColor=white&color=0891b2)](https://gptr.dev) +[![Documentation](https://img.shields.io/badge/Documentation-DOCS-f472b6?logo=googledocs&logoColor=white&style=for-the-badge)](https://docs.gptr.dev) +[![Discord Follow](https://img.shields.io/discord/1127851779011391548?style=for-the-badge&logo=discord&label=Chat%20on%20Discord)](https://discord.gg/QgZXvJAccX) + +[![PyPI version](https://img.shields.io/pypi/v/gpt-researcher?logo=pypi&logoColor=white&style=flat)](https://badge.fury.io/py/gpt-researcher) +![GitHub Release](https://img.shields.io/github/v/release/assafelovic/gpt-researcher?style=flat&logo=github) +[![Open In Colab](https://img.shields.io/static/v1?message=Open%20in%20Colab&logo=googlecolab&labelColor=grey&color=yellow&label=%20&style=flat&logoSize=40)](https://colab.research.google.com/github/assafelovic/gpt-researcher/blob/master/docs/docs/examples/pip-run.ipynb) +[![Docker Image Version](https://img.shields.io/docker/v/elestio/gpt-researcher/latest?arch=amd64&style=flat&logo=docker&logoColor=white&color=1D63ED)](https://hub.docker.com/r/gptresearcher/gpt-researcher) +[![Twitter Follow](https://img.shields.io/twitter/follow/assaf_elovic?style=social)](https://twitter.com/assaf_elovic) + +[English](README.md) | +[中文](README-zh_CN.md) | +[日本語](README-ja_JP.md) | +[한국어](README-ko_KR.md) +
+ +# 🔎 GPT Researcher + +**GPT Researcher는 다양한 작업에 대해 포괄적인 온라인 연구를 수행하도록 설계된 자율 에이전트입니다.** + +이 에이전트는 세부적이고 사실에 기반하며 편견 없는 연구 보고서를 생성할 수 있으며, 관련 리소스와 개요에 초점을 맞춘 맞춤형 옵션을 제공합니다. 최근 발표된 [Plan-and-Solve](https://arxiv.org/abs/2305.04091) 및 [RAG](https://arxiv.org/abs/2005.11401) 논문에서 영감을 받아 GPT Researcher는 잘못된 정보, 속도, 결정론적 접근 방식, 신뢰성 문제를 해결하고, 동기화 작업이 아닌 병렬 에이전트 작업을 통해 더 안정적이고 빠른 성능을 제공합니다. + +**우리의 목표는 AI의 힘을 활용하여 개인과 조직에게 정확하고 편향 없는 사실에 기반한 정보를 제공하는 것입니다.** + +## 왜 GPT Researcher인가? + +- 직접 수행하는 연구 과정은 객관적인 결론을 도출하는 데 시간이 오래 걸리며, 적절한 리소스와 정보를 찾는 데 몇 주가 걸릴 수 있습니다. +- 현재의 대규모 언어 모델(LLM)은 과거 정보에 기반해 훈련되었으며, 환각 현상이 발생할 위험이 높아 연구 작업에는 적합하지 않습니다. +- 현재 LLM은 짧은 토큰 출력으로 제한되며, 2,000단어 이상의 길고 자세한 연구 보고서를 작성하는 데는 충분하지 않습니다. +- 웹 검색을 지원하는 서비스(예: ChatGPT 또는 Perplexity)는 제한된 리소스와 콘텐츠만을 고려하여 경우에 따라 피상적이고 편향된 답변을 제공합니다. +- 웹 소스만을 사용하면 연구 작업에서 올바른 결론을 도출할 때 편향이 발생할 수 있습니다. + +## 데모 +https://github.com/user-attachments/assets/092e9e71-7e27-475d-8c4f-9dddd28934a3 + +## 아키텍처 +주요 아이디어는 "플래너"와 "실행" 에이전트를 실행하는 것으로, 플래너는 연구할 질문을 생성하고, 실행 에이전트는 생성된 각 연구 질문에 따라 가장 관련성 높은 정보를 찾습니다. 마지막으로 플래너는 모든 관련 정보를 필터링하고 집계하여 연구 보고서를 작성합니다. +

+에이전트는 `gpt-4o-mini`와 `gpt-4o`(128K 컨텍스트)를 활용하여 연구 작업을 완료합니다. 필요에 따라 각각을 사용하여 비용을 최적화합니다. **평균 연구 작업은 약 2분이 소요되며, 비용은 약 $0.005입니다.** +
+ +
+ +구체적으로: +* 연구 쿼리 또는 작업을 기반으로 도메인별 에이전트를 생성합니다. +* 주어진 작업에 대해 객관적인 의견을 형성할 수 있는 일련의 연구 질문을 생성합니다. +* 각 연구 질문에 대해 크롤러 에이전트를 실행하여 작업과 관련된 정보를 온라인 리소스에서 수집합니다. +* 수집된 각 리소스에서 관련 정보를 요약하고 출처를 기록합니다. +* 마지막으로, 요약된 모든 정보를 필터링하고 집계하여 최종 연구 보고서를 생성합니다. + +## 튜토리얼 + - [동작원리](https://docs.gptr.dev/blog/building-gpt-researcher) + - [설치방법](https://www.loom.com/share/04ebffb6ed2a4520a27c3e3addcdde20?sid=da1848e8-b1f1-42d1-93c3-5b0b9c3b24ea) + - [라이브 데모](https://www.loom.com/share/6a3385db4e8747a1913dd85a7834846f?sid=a740fd5b-2aa3-457e-8fb7-86976f59f9b8) + + +## 기능 +- 📝 로컬 문서 및 웹 소스를 사용하여 연구, 개요, 리소스 및 학습 보고서 생성 +- 📜 2,000단어 이상의 길고 상세한 연구 보고서 생성 가능 +- 🌐 연구당 20개 이상의 웹 소스를 집계하여 객관적이고 사실에 기반한 결론 도출 +- 🖥️ 경량 HTML/CSS/JS와 프로덕션용 (NextJS + Tailwind) UX/UI 포함 +- 🔍 자바스크립트 지원 웹 소스 스크래핑 기능 +- 📂 연구 과정에서 맥락과 메모리 추적 및 유지 +- 📄 연구 보고서를 PDF, Word 등으로 내보내기 지원 + +## 📖 문서 + +전체 문서(설치, 환경 설정, 간단한 예시)를 보려면 [여기](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started)를 참조하세요. + +- 시작하기 (설치, 환경 설정, 간단한 예시) +- 맞춤 설정 및 구성 +- 사용 방법 예시 (데모, 통합, 도커 지원) +- 참고자료 (전체 API 문서) + +## ⚙️ 시작하기 +### 설치 +> **1단계** - Python 3.11 또는 그 이상의 버전을 설치하세요. [여기](https://www.tutorialsteacher.com/python/install-python)를 참조하여 단계별 가이드를 확인하세요. + +> **2단계** - 프로젝트를 다운로드하고 해당 디렉토리로 이동하세요. + +```bash +git clone https://github.com/assafelovic/gpt-researcher.git +cd gpt-researcher +``` + +> **3단계** - 두 가지 방법으로 API 키를 설정하세요: 직접 export하거나 `.env` 파일에 저장하세요. + +Linux/Windows에서 임시 설정을 하려면 export 방법을 사용하세요: + +```bash +export OPENAI_API_KEY={OpenAI API 키 입력} +export TAVILY_API_KEY={Tavily API 키 입력} +``` + +더 영구적인 설정을 원한다면, 현재의 `gpt-researcher` 디렉토리에 `.env` 파일을 생성하고 환경 변수를 입력하세요 (export 없이). + +- 기본 LLM은 [GPT](https://platform.openai.com/docs/guides/gpt)이지만, `claude`, `ollama3`, `gemini`, `mistral` 등 다른 LLM도 사용할 수 있습니다. LLM 제공자를 변경하는 방법은 [LLMs 문서](https://docs.gptr.dev/docs/gpt-researcher/llms/llms)를 참조하세요. 이 프로젝트는 OpenAI GPT 모델에 최적화되어 있습니다. +- 기본 검색기는 [Tavily](https://app.tavily.com)이지만, `duckduckgo`, `google`, `bing`, `searchapi`, `serper`, `searx`, `arxiv`, `exa` 등의 검색기를 사용할 수 있습니다. 검색 제공자를 변경하는 방법은 [검색기 문서](https://docs.gptr.dev/docs/gpt-researcher/retrievers)를 참조하세요. + +### 빠른 시작 + +> **1단계** - 필요한 종속성 설치 + +```bash +pip install -r requirements.txt +``` + +> **2단계** - FastAPI로 에이전트 실행 + +```bash +python -m uvicorn main:app --reload +``` + +> **3단계** - 브라우저에서 http://localhost:8000 으로 이동하여 연구를 시작하세요! + +
+ +**[Poetry](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started#poetry) 또는 [가상 환경](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started#virtual-environment)에 대해 배우고 싶다면, [문서](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started)를 참조하세요.** + +### PIP 패키지로 실행하기 +```bash +pip install gpt-researcher +``` + +```python +... +from gpt_researcher import GPTResearcher + +query = "왜 Nvidia 주식이 오르고 있나요?" +researcher = GPTResearcher(query=query, report_type="research_report") +# 주어진 질문에 대한 연구 수행 +research_result = await researcher.conduct_research() +# 보고서 작성 +report = await researcher.write_report() +... +``` + +**더 많은 예제와 구성 옵션은 [PIP 문서](https://docs.gptr.dev/docs/gpt-researcher/gptr/pip-package)를 참조하세요.** + +## Docker로 실행 + +> **1단계** - [Docker 설치](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started-with-docker) + +> **2단계** - `.env.example` 파일을 복사하고 API 키를 추가한 후, 파일을 `.env`로 저장하세요. + +> **3단계** - docker-compose 파일에서 실행하고 싶지 않은 서비스를 주석 처리하세요. + +```bash +$ docker-compose up --build +``` + +> **4단계** - docker-compose 파일에서 아무 것도 주석 처리하지 않았다면, 기본적으로 두 가지 프로세스가 시작됩니다: + - localhost:8000에서 실행 중인 Python 서버
+ - localhost:3000에서 실행 중인 React 앱
+ +브라우저에서 localhost:3000으로 이동하여 연구를 시작하세요! + +## 📄 로컬 문서로 연구하기 + +GPT Researcher를 사용하여 로컬 문서를 기반으로 연구 작업을 수행할 수 있습니다. 현재 지원되는 파일 형식은 PDF, 일반 텍스트, CSV, Excel, Markdown, PowerPoint, Word 문서입니다. + +1단계: `DOC_PATH` 환경 변수를 설정하여 문서가 있는 폴더를 지정하세요. + +```bash +export DOC_PATH="./my-docs" +``` + +2단계: + - 프론트엔드 앱을 localhost:8000에서 실행 중이라면, "Report Source" 드롭다운 옵션에서 "My Documents"를 선택하세요. + - GPT Researcher를 [PIP 패키지](https://docs.tavily.com/docs/gpt-researcher/pip-package)로 실행 중이라면, `report_source` 인수를 "local"로 설정하여 `GPTResearcher` 클래스를 인스턴스화하세요. [코드 예제](https://docs.gptr.dev/docs/gpt-researcher/context/tailored-research)를 참조하세요. + +## 👪 다중 에이전트 어시스턴트 + +AI가 프롬프트 엔지니어링 및 RAG에서 다중 에이전트 시스템으로 발전함에 따라, 우리는 [LangGraph](https://python.langchain.com/v0.1/docs/langgraph/)로 구축된 새로운 다중 에이전트 어시스턴트를 소개합니다. + +LangGraph를 사용하면 여러 에이전트의 전문 기술을 활용하여 연구 과정의 깊이와 질을 크게 향상시킬 수 있습니다. 최근 [STORM](https://arxiv.org/abs/2402.14207) 논문에서 영감을 받아, 이 프로젝트는 AI 에이전트 팀이 주제에 대한 연구를 계획에서 출판까지 함께 수행하는 방법을 보여줍니다. + +평균 실행은 5-6 페이지 분량의 연구 보고서를 PDF, Docx, Markdown 형식으로 생성합니다. + +[여기](https://github.com/assafelovic/gpt-researcher/tree/master/multi_agents)에서 확인하거나 [문서](https://docs.gptr.dev/docs/gpt-researcher/multi_agents/langgraph)에서 자세한 내용을 참조하세요. + +## 🖥️ 프론트엔드 애플리케이션 + +GPT-Researcher는 사용자 경험을 개선하고 연구 프로세스를 간소화하기 위해 향상된 프론트엔드를 제공합니다. 프론트엔드는 다음과 같은 기능을 제공합니다: + +- 연구 쿼리를 입력할 수 있는 직관적인 인터페이스 +- 연구 작업의 실시간 진행 상황 추적 +- 연구 결과의 대화형 디스플레이 +- 맞춤형 연구 경험을 위한 설정 가능 + +두 가지 배포 옵션이 있습니다: +1. FastAPI로 제공되는 경량 정적 프론트엔드 +2. 고급 기능을 제공하는 NextJS 애플리케이션 + +프론트엔드 기능에 대한 자세한 설치 방법 및 정보를 원하시면 [문서 페이지](https://docs.gptr.dev/docs/gpt-researcher/frontend/frontend)를 참조하세요. + +## 🚀 기여하기 +우리는 기여를 적극 환영합니다! 관심이 있다면 [기여 가이드](https://github.com/assafelovic/gpt-researcher/blob/master/CONTRIBUTING.md)를 확인해 주세요. + +[로드맵](https://trello.com/b/3O7KBePw/gpt-researcher-roadmap) 페이지를 확인하고, 우리 [Discord 커뮤니티](https://discord.gg/QgZXvJAccX)에 가입하여 우리의 목표에 함께 참여해 주세요. + + + + +## ✉️ 지원 / 문의 +- [커뮤니티 Discord](https://discord.gg/spBgZmm3Xe) +- 저자 이메일: assaf.elovic@gmail.com + +## 🛡️ 면책 조항 + +이 프로젝트인 GPT Researcher는 실험적인 응용 프로그램이며, 명시적이거나 묵시적인 보증 없이 "있는 그대로" 제공됩니다. 우리는 이 코드를 학술적 목적으로 Apache 2 라이선스 하에 공유하고 있습니다. 여기에 있는 것은 학술적 조언이 아니며, 학술 또는 연구 논문에 사용하는 것을 권장하지 않습니다. + +편향되지 않은 연구 주장에 대한 우리의 견해: +1. GPT Researcher의 주요 목표는 잘못된 정보와 편향된 사실을 줄이는 것입니다. 그 방법은 무엇일까요? 우리는 더 많은 사이트를 스크래핑할수록 잘못된 데이터의 가능성이 줄어든다고 가정합니다. 여러 사이트에서 정보를 스크래핑하고 가장 빈번한 정보를 선택하면, 모든 정보가 틀릴 확률은 매우 낮습니다. +2. 우리는 편향을 완전히 제거하려고 하지는 않지만, 가능한 한 줄이는 것을 목표로 합니다. **우리는 인간과 LLM의 가장 효과적인 상호작용을 찾기 위한 커뮤니티입니다.** +3. 연구에서 사람들도 이미 자신이 연구하는 주제에 대해 의견을 가지고 있기 때문에 편향되는 경향이 있습니다. 이 도구는 많은 의견을 스크래핑하며, 편향된 사람이라면 결코 읽지 않았을 다양한 견해를 고르게 설명합니다. + +**GPT-4 모델을 사용할 경우, 토큰 사용량 때문에 비용이 많이 들 수 있습니다.** 이 프로젝트를 사용하는 경우, 자신의 토큰 사용량 및 관련 비용을 모니터링하고 관리하는 것은 본인의 책임입니다. OpenAI API 사용량을 정기적으로 확인하고, 예상치 못한 비용을 방지하기 위해 필요한 한도를 설정하거나 알림을 설정하는 것이 좋습니다. + + +--- + +
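참고로, 위의 '📄 로컬 문서로 연구하기' 섹션을 코드로 보완하는 최소 스케치입니다. 문서에 언급된 `DOC_PATH` 환경 변수와 `report_source="local"` 인수를 사용한다고 가정하며, 쿼리와 경로는 예시일 뿐입니다. 자세한 내용은 공식 문서를 참조하세요:

```python
# 로컬 문서를 대상으로 연구를 수행하는 최소 스케치 (DOC_PATH와 쿼리는 예시)
import asyncio
import os

from gpt_researcher import GPTResearcher

os.environ.setdefault("DOC_PATH", "./my-docs")  # 연구에 사용할 문서 폴더


async def main() -> None:
    researcher = GPTResearcher(
        query="내부 문서의 핵심 결론은 무엇인가요?",
        report_type="research_report",
        report_source="local",  # 웹 대신 로컬 문서를 소스로 사용
    )
    await researcher.conduct_research()
    report = await researcher.write_report()
    print(report)


asyncio.run(main())
```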

+ + + + + Star History Chart + + +

diff --git a/README-zh_CN.md b/README-zh_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..b0f950f349fae29b7fe55809ed844815149768cd --- /dev/null +++ b/README-zh_CN.md @@ -0,0 +1,158 @@ +
+ +Logo + + +#### + +[![Website](https://img.shields.io/badge/Official%20Website-gptr.dev-teal?style=for-the-badge&logo=world&logoColor=white&color=0891b2)](https://gptr.dev) +[![Documentation](https://img.shields.io/badge/Documentation-DOCS-f472b6?logo=googledocs&logoColor=white&style=for-the-badge)](https://docs.gptr.dev) +[![Discord Follow](https://img.shields.io/discord/1127851779011391548?style=for-the-badge&logo=discord&label=Chat%20on%20Discord)](https://discord.gg/QgZXvJAccX) + +[![PyPI version](https://img.shields.io/pypi/v/gpt-researcher?logo=pypi&logoColor=white&style=flat)](https://badge.fury.io/py/gpt-researcher) +![GitHub Release](https://img.shields.io/github/v/release/assafelovic/gpt-researcher?style=flat&logo=github) +[![Open In Colab](https://img.shields.io/static/v1?message=Open%20in%20Colab&logo=googlecolab&labelColor=grey&color=yellow&label=%20&style=flat&logoSize=40)](https://colab.research.google.com/github/assafelovic/gpt-researcher/blob/master/docs/docs/examples/pip-run.ipynb) +[![Docker Image Version](https://img.shields.io/docker/v/elestio/gpt-researcher/latest?arch=amd64&style=flat&logo=docker&logoColor=white&color=1D63ED)](https://hub.docker.com/r/gptresearcher/gpt-researcher) +[![Twitter Follow](https://img.shields.io/twitter/follow/assaf_elovic?style=social)](https://twitter.com/assaf_elovic) + +[English](README.md) | +[中文](README-zh_CN.md) | +[日本語](README-ja_JP.md) | +[한국어](README-ko_KR.md) +
+ +# 🔎 GPT Researcher + +**GPT Researcher 是一个智能体代理,专为各种任务的综合在线研究而设计。** + +代理可以生成详细、正式且客观的研究报告,并提供自定义选项,专注于相关资源、结构框架和经验报告。受最近发表的[Plan-and-Solve](https://arxiv.org/abs/2305.04091) 和[RAG](https://arxiv.org/abs/2005.11401) 论文的启发,GPT Researcher 解决了速度、确定性和可靠性等问题,通过并行化的代理运行,而不是同步操作,提供了更稳定的性能和更高的速度。 + +**我们的使命是利用人工智能的力量,为个人和组织提供准确、客观和事实的信息。** + +## 为什么选择GPT Researcher? + +- 因为人工研究任务形成客观结论可能需要时间和经历,有时甚至需要数周才能找到正确的资源和信息。 +- 目前的LLM是根据历史和过时的信息进行训练的,存在严重的幻觉风险,因此几乎无法胜任研究任务。 +- 网络搜索的解决方案(例如 ChatGPT + Web 插件)仅考虑有限的资源和内容,在某些情况下会导致肤浅的结论或不客观的答案。 +- 只使用部分资源可能会在确定研究问题或任务的正确结论时产生偏差。 + +## 架构 +主要思想是运行“**计划者**”和“**执行**”代理,而**计划者**生成问题进行研究,“**执行**”代理根据每个生成的研究问题寻找最相关的信息。最后,“**计划者**”过滤和聚合所有相关信息并创建研究报告。
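+A hand-written illustration of that planner / execution / publisher split (the function names below are invented for this sketch and are not part of the `gpt_researcher` package):
+
+```python
+import asyncio
+
+
+async def plan_questions(task: str) -> list[str]:
+    # "Planner" agent: break the task into focused research questions
+    return [f"sub-question {i} about: {task}" for i in range(1, 4)]
+
+
+async def execute_search(question: str) -> str:
+    # "Execution" agent: crawl sources and summarize findings for one question
+    return f"summary of findings for: {question}"
+
+
+async def research(task: str) -> str:
+    questions = await plan_questions(task)
+    # Execution agents run in parallel, which is where the speed-up comes from
+    summaries = await asyncio.gather(*(execute_search(q) for q in questions))
+    # "Publisher" step: filter and aggregate everything into a single report
+    return "\n\n".join(summaries)
+
+
+print(asyncio.run(research("Is AI in a hype cycle?")))
+```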

+代理同时利用 gpt-4o-mini 和 gpt-4o(128K 上下文)来完成一项研究任务。我们仅在必要时使用这两种方法对成本进行优化。**研究任务平均耗时约 3 分钟,成本约为 ~0.1 美元**。 + +<div
+ +
+ + +详细说明: +* 根据研究搜索或任务创建特定领域的代理。 +* 生成一组研究问题,这些问题共同形成答案对任何给定任务的客观意见。 +* 针对每个研究问题,触发一个爬虫代理,从在线资源中搜索与给定任务相关的信息。 +* 对于每一个抓取的资源,根据相关信息进行汇总,并跟踪其来源。 +* 最后,对所有汇总的资料来源进行过滤和汇总,并生成最终研究报告。 + +## 演示 +https://github.com/assafelovic/gpt-researcher/assets/13554167/a00c89a6-a295-4dd0-b58d-098a31c40fda + +## 教程 + - [运行原理](https://docs.gptr.dev/blog/building-gpt-researcher) + - [如何安装](https://www.loom.com/share/04ebffb6ed2a4520a27c3e3addcdde20?sid=da1848e8-b1f1-42d1-93c3-5b0b9c3b24ea) + - [现场演示](https://www.loom.com/share/6a3385db4e8747a1913dd85a7834846f?sid=a740fd5b-2aa3-457e-8fb7-86976f59f9b8) + +## 特性 +- 📝 生成研究问题、大纲、资源和课题报告 +- 🌐 每项研究汇总超过20个网络资源,形成客观和真实的结论 +- 🖥️ 包括易于使用的web界面 (HTML/CSS/JS) +- 🔍 支持JavaScript网络资源抓取功能 +- 📂 追踪访问过和使用过的网络资源和来源 +- 📄 将研究报告导出为PDF或其他格式... + +## 📖 文档 + +请参阅[此处](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started),了解完整文档: + +- 入门(安装、设置环境、简单示例) +- 操作示例(演示、集成、docker 支持) +- 参考资料(API完整文档) +- Tavily 应用程序接口集成(核心概念的高级解释) + +## 快速开始 +> **步骤 0** - 安装 Python 3.11 或更高版本。[参见此处](https://www.tutorialsteacher.com/python/install-python) 获取详细指南。 + +
+ +> **步骤 1** - 下载项目 + +```bash +$ git clone https://github.com/assafelovic/gpt-researcher.git +$ cd gpt-researcher +``` + +
+ +> **步骤2** -安装依赖项 +```bash +$ pip install -r requirements.txt +``` +
+ +> **第 3 步** - 使用 OpenAI 密钥和 Tavily API 密钥创建 .env 文件,或直接导出该文件 + +```bash +$ export OPENAI_API_KEY={Your OpenAI API Key here} +``` +```bash +$ export TAVILY_API_KEY={Your Tavily API Key here} +``` + +- **LLM,我们推荐使用 [OpenAI GPT](https://platform.openai.com/docs/guides/gpt)**,但您也可以使用 [Langchain Adapter](https://python.langchain.com/docs/guides/adapters/openai) 支持的任何其他 LLM 模型(包括开源),只需在 config/config.py 中更改 llm 模型和提供者即可。请按照 [这份指南](https://python.langchain.com/docs/integrations/llms/) 学习如何将 LLM 与 Langchain 集成。 +- **对于搜索引擎,我们推荐使用 [Tavily Search API](https://app.tavily.com)(已针对 LLM 进行优化)**,但您也可以选择其他搜索引擎,只需将 config/config.py 中的搜索提供程序更改为 "duckduckgo"、"googleAPI"、"searchapi"、"googleSerp "或 "searx "即可。然后在 config.py 文件中添加相应的 env API 密钥。 +- **我们强烈建议使用 [OpenAI GPT](https://platform.openai.com/docs/guides/gpt) 模型和 [Tavily Search API](https://app.tavily.com) 以获得最佳性能。** +
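+A rough sketch of switching providers from code instead of editing `config/config.py` directly; it assumes the `RETRIEVER` and API-key environment variables that `backend/server/server_utils.py` in this diff reads, and the exact accepted values may differ between releases:
+
+```python
+import os
+
+# Assumed variable names; see get_config_dict() in backend/server/server_utils.py
+os.environ["RETRIEVER"] = "duckduckgo"     # swap the Tavily retriever for DuckDuckGo
+os.environ["OPENAI_API_KEY"] = "sk-..."    # still required for the default LLM
+
+from gpt_researcher import GPTResearcher   # import after the environment is set
+```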
+ +> **第 4 步** - 使用 FastAPI 运行代理 + +```bash +$ uvicorn main:app --reload +``` +
+ +> **第 5 步** - 在任何浏览器上访问 http://localhost:8000,享受研究乐趣! + +要了解如何开始使用 Docker 或了解有关功能和服务的更多信息,请访问 [documentation](https://docs.gptr.dev) 页面。 + +## 🚀 贡献 +我们非常欢迎您的贡献!如果您感兴趣,请查看 [contributing](CONTRIBUTING.md)。 + +如果您有兴趣加入我们的任务,请查看我们的 [路线图](https://trello.com/b/3O7KBePw/gpt-researcher-roadmap) 页面,并通过我们的 [Discord 社区](https://discord.gg/QgZXvJAccX) 联系我们。 + +## ✉️ 支持 / 联系我们 +- [社区讨论区](https://discord.gg/spBgZmm3Xe) +- 我们的邮箱: support@tavily.com + +## 🛡 免责声明 + +本项目 "GPT Researcher "是一个实验性应用程序,按 "现状 "提供,不做任何明示或暗示的保证。我们根据 MIT 许可分享用于学术目的的代码。本文不提供任何学术建议,也不建议在学术或研究论文中使用。 + +我们对客观研究主张的看法: +1. 我们抓取系统的全部目的是减少不正确的事实。如何解决?我们抓取的网站越多,错误数据的可能性就越小。我们每项研究都会收集20条信息,它们全部错误的可能性极低。 +2. 我们的目标不是消除偏见,而是尽可能减少偏见。**作为一个社区,我们在这里探索最有效的人机互动**。 +3. 在研究过程中,人们也容易产生偏见,因为大多数人对自己研究的课题都有自己的看法。这个工具可以搜罗到许多观点,并均匀地解释各种不同的观点,而有偏见的人是绝对读不到这些观点的。 + +**请注意,使用 GPT-4 语言模型可能会因使用令牌而产生高昂费用**。使用本项目即表示您承认有责任监控和管理自己的令牌使用情况及相关费用。强烈建议您定期检查 OpenAI API 的使用情况,并设置任何必要的限制或警报,以防止发生意外费用。 + +--- + +

+ + + + + Star History Chart + + +

diff --git a/README.md b/README.md index d7ffae005dee34bb6f27fadcb8842d12a2461592..b7493cf6278d3678ea398c18ce41be8ba43a7007 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,231 @@ ---- -title: GPT Researcher -emoji: 👁 -colorFrom: indigo -colorTo: red -sdk: static -pinned: false -license: apache-2.0 ---- - -Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference +
+ +Logo + +#### + +[![Website](https://img.shields.io/badge/Official%20Website-gptr.dev-teal?style=for-the-badge&logo=world&logoColor=white&color=0891b2)](https://gptr.dev) +[![Documentation](https://img.shields.io/badge/Documentation-DOCS-f472b6?logo=googledocs&logoColor=white&style=for-the-badge)](https://docs.gptr.dev) +[![Discord Follow](https://dcbadge.vercel.app/api/server/QgZXvJAccX?style=for-the-badge&theme=clean-inverted&?compact=true)](https://discord.gg/QgZXvJAccX) + +[![PyPI version](https://img.shields.io/pypi/v/gpt-researcher?logo=pypi&logoColor=white&style=flat)](https://badge.fury.io/py/gpt-researcher) +![GitHub Release](https://img.shields.io/github/v/release/assafelovic/gpt-researcher?style=flat&logo=github) +[![Open In Colab](https://img.shields.io/static/v1?message=Open%20in%20Colab&logo=googlecolab&labelColor=grey&color=yellow&label=%20&style=flat&logoSize=40)](https://colab.research.google.com/github/assafelovic/gpt-researcher/blob/master/docs/docs/examples/pip-run.ipynb) +[![Docker Image Version](https://img.shields.io/docker/v/elestio/gpt-researcher/latest?arch=amd64&style=flat&logo=docker&logoColor=white&color=1D63ED)](https://hub.docker.com/r/gptresearcher/gpt-researcher) +[![Twitter Follow](https://img.shields.io/twitter/follow/assaf_elovic?style=social)](https://twitter.com/assaf_elovic) + +[English](README.md) | [中文](README-zh_CN.md) | [日本語](README-ja_JP.md) | [한국어](README-ko_KR.md) + +
+ +# 🔎 GPT Researcher + +**GPT Researcher is an autonomous agent designed for comprehensive web and local research on any given task.** + +The agent produces detailed, factual, and unbiased research reports with citations. GPT Researcher provides a full suite of customization options to create tailor made and domain specific research agents. Inspired by the recent [Plan-and-Solve](https://arxiv.org/abs/2305.04091) and [RAG](https://arxiv.org/abs/2005.11401) papers, GPT Researcher addresses misinformation, speed, determinism, and reliability by offering stable performance and increased speed through parallelized agent work. + +**Our mission is to empower individuals and organizations with accurate, unbiased, and factual information through AI.** + +## Why GPT Researcher? + +- Objective conclusions for manual research can take weeks, requiring vast resources and time. +- LLMs trained on outdated information can hallucinate, becoming irrelevant for current research tasks. +- Current LLMs have token limitations, insufficient for generating long research reports. +- Limited web sources in existing services lead to misinformation and shallow results. +- Selective web sources can introduce bias into research tasks. + +## Demo +https://github.com/user-attachments/assets/2cc38f6a-9f66-4644-9e69-a46c40e296d4 + +## Architecture + +The core idea is to utilize 'planner' and 'execution' agents. The planner generates research questions, while the execution agents gather relevant information. The publisher then aggregates all findings into a comprehensive report. + +
+ +
+ +Steps: +* Create a task-specific agent based on a research query. +* Generate questions that collectively form an objective opinion on the task. +* Use a crawler agent for gathering information for each question. +* Summarize and source-track each resource. +* Filter and aggregate summaries into a final research report. + +## Tutorials + - [How it Works](https://docs.gptr.dev/blog/building-gpt-researcher) + - [How to Install](https://www.loom.com/share/04ebffb6ed2a4520a27c3e3addcdde20?sid=da1848e8-b1f1-42d1-93c3-5b0b9c3b24ea) + - [Live Demo](https://www.loom.com/share/6a3385db4e8747a1913dd85a7834846f?sid=a740fd5b-2aa3-457e-8fb7-86976f59f9b8) + +## Features + +- 📝 Generate detailed research reports using web and local documents. +- 🖼️ Smart image scraping and filtering for reports. +- 📜 Generate detailed reports exceeding 2,000 words. +- 🌐 Aggregate over 20 sources for objective conclusions. +- 🖥️ Frontend available in lightweight (HTML/CSS/JS) and production-ready (NextJS + Tailwind) versions. +- 🔍 JavaScript-enabled web scraping. +- 📂 Maintains memory and context throughout research. +- 📄 Export reports to PDF, Word, and other formats. + +## 📖 Documentation + +See the [Documentation](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started) for: +- Installation and setup guides +- Configuration and customization options +- How-To examples +- Full API references + +## ⚙️ Getting Started + +### Installation + +1. Install Python 3.11 or later. [Guide](https://www.tutorialsteacher.com/python/install-python). +2. Clone the project and navigate to the directory: + + ```bash + git clone https://github.com/assafelovic/gpt-researcher.git + cd gpt-researcher + ``` + +3. Set up API keys by exporting them or storing them in a `.env` file. + + ```bash + export OPENAI_API_KEY={Your OpenAI API Key here} + export TAVILY_API_KEY={Your Tavily API Key here} + ``` + +4. Install dependencies and start the server: + + ```bash + pip install -r requirements.txt + python -m uvicorn main:app --reload + ``` + +Visit [http://localhost:8000](http://localhost:8000) to start. + +For other setups (e.g., Poetry or virtual environments), check the [Getting Started page](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started). + +## Run as PIP package +```bash +pip install gpt-researcher + +``` +### Example Usage: +```python +... +from gpt_researcher import GPTResearcher + +query = "why is Nvidia stock going up?" +researcher = GPTResearcher(query=query, report_type="research_report") +# Conduct research on the given query +research_result = await researcher.conduct_research() +# Write the report +report = await researcher.write_report() +... +``` + +**For more examples and configurations, please refer to the [PIP documentation](https://docs.gptr.dev/docs/gpt-researcher/gptr/pip-package) page.** + + +## Run with Docker + +> **Step 1** - [Install Docker](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started-with-docker) + +> **Step 2** - Clone the '.env.example' file, add your API Keys to the cloned file and save the file as '.env' + +> **Step 3** - Within the docker-compose file comment out services that you don't want to run with Docker. + +```bash +docker-compose up --build +``` + +If that doesn't work, try running it without the dash: +```bash +docker compose up --build +``` + + +> **Step 4** - By default, if you haven't uncommented anything in your docker-compose file, this flow will start 2 processes: + - the Python server running on localhost:8000
+ - the React app running on localhost:3000
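+If you'd rather drive the backend programmatically than use the web UI, its WebSocket endpoint accepts the same `start` command the frontend sends; the framing below mirrors `handle_start_command` in `backend/server/server_utils.py` from this diff. A minimal sketch, assuming the default ports, the third-party `websockets` package (not bundled with this project), and field values such as `"report_source": "web"` that may differ in your setup:
+
+```python
+import asyncio
+import json
+
+import websockets  # pip install websockets
+
+
+async def run_research():
+    async with websockets.connect("ws://localhost:8000/ws") as ws:
+        payload = {
+            "task": "Why is Nvidia stock going up?",
+            "report_type": "research_report",
+            "report_source": "web",      # assumed value; "local" reads from DOC_PATH
+            "source_urls": [],
+            "document_urls": [],
+            "tone": "Objective",
+            "headers": {},
+        }
+        await ws.send("start " + json.dumps(payload))
+        while True:
+            event = json.loads(await ws.recv())
+            print(event)
+            if event.get("type") == "path":  # file paths of the finished report
+                break
+
+
+asyncio.run(run_research())
+```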
+ +Visit localhost:3000 on any browser and enjoy researching! + + + +## 📄 Research on Local Documents + +You can instruct the GPT Researcher to run research tasks based on your local documents. Currently supported file formats are: PDF, plain text, CSV, Excel, Markdown, PowerPoint, and Word documents. + +Step 1: Add the env variable `DOC_PATH` pointing to the folder where your documents are located. + +```bash +export DOC_PATH="./my-docs" +``` + +Step 2: + - If you're running the frontend app on localhost:8000, simply select "My Documents" from the "Report Source" Dropdown Options. + - If you're running GPT Researcher with the [PIP package](https://docs.tavily.com/docs/gpt-researcher/pip-package), pass the `report_source` argument as "local" when you instantiate the `GPTResearcher` class [code sample here](https://docs.gptr.dev/docs/gpt-researcher/context/tailored-research). + + +## 👪 Multi-Agent Assistant +As AI evolves from prompt engineering and RAG to multi-agent systems, we're excited to introduce our new multi-agent assistant built with [LangGraph](https://python.langchain.com/v0.1/docs/langgraph/). + +By using LangGraph, the research process can be significantly improved in depth and quality by leveraging multiple agents with specialized skills. Inspired by the recent [STORM](https://arxiv.org/abs/2402.14207) paper, this project showcases how a team of AI agents can work together to conduct research on a given topic, from planning to publication. + +An average run generates a 5-6 page research report in multiple formats such as PDF, Docx and Markdown. + +Check it out [here](https://github.com/assafelovic/gpt-researcher/tree/master/multi_agents) or head over to our [documentation](https://docs.gptr.dev/docs/gpt-researcher/multi_agents/langgraph) for more information. + +## 🖥️ Frontend Applications + +GPT-Researcher now features an enhanced frontend to improve the user experience and streamline the research process. The frontend offers: + +- An intuitive interface for inputting research queries +- Real-time progress tracking of research tasks +- Interactive display of research findings +- Customizable settings for tailored research experiences + +Two deployment options are available: +1. A lightweight static frontend served by FastAPI +2. A feature-rich NextJS application for advanced functionality + +For detailed setup instructions and more information about the frontend features, please visit our [documentation page](https://docs.gptr.dev/docs/gpt-researcher/frontend/frontend). + +## 🚀 Contributing +We highly welcome contributions! Please check out [contributing](https://github.com/assafelovic/gpt-researcher/blob/master/CONTRIBUTING.md) if you're interested. + +Please check out our [roadmap](https://trello.com/b/3O7KBePw/gpt-researcher-roadmap) page and reach out to us via our [Discord community](https://discord.gg/QgZXvJAccX) if you're interested in joining our mission. + + + +## ✉️ Support / Contact us +- [Community Discord](https://discord.gg/spBgZmm3Xe) +- Author Email: assaf.elovic@gmail.com + +## 🛡 Disclaimer + +This project, GPT Researcher, is an experimental application and is provided "as-is" without any warranty, express or implied. We are sharing codes for academic purposes under the Apache 2 license. Nothing herein is academic advice, and NOT a recommendation to use in academic or research papers. + +Our view on unbiased research claims: +1. The main goal of GPT Researcher is to reduce incorrect and biased facts. How? 
We assume that the more sites we scrape, the lower the chance of incorrect data. By scraping multiple sites per research task and choosing the most frequent information, the chance that all of them are wrong is extremely low. +2. We do not aim to eliminate biases; we aim to reduce them as much as possible. **We are here as a community to figure out the most effective human/LLM interactions.** +3. In research, people also tend toward bias, as most already have opinions on the topics they research. This tool scrapes many opinions and will evenly present diverse views that a biased person would otherwise never have read. + +--- + +

+ + + + + Star History Chart + + +

+ + +

+ ⬆️ Back to Top +

diff --git a/__pycache__/main.cpython-312.pyc b/__pycache__/main.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3bb8858398bb9345ef8947017430d289ef170795 Binary files /dev/null and b/__pycache__/main.cpython-312.pyc differ diff --git a/backend/__init__.py b/backend/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..870e9568c7d9a67bb88f952133124804fb322e7b --- /dev/null +++ b/backend/__init__.py @@ -0,0 +1 @@ +from multi_agents import agents \ No newline at end of file diff --git a/backend/__pycache__/__init__.cpython-312.pyc b/backend/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c928be3f62fca1fa9dc21204233a3d01ad8f02a9 Binary files /dev/null and b/backend/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/__pycache__/utils.cpython-312.pyc b/backend/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4c732e3f3b1c06c50cc1664551fdb94d579813e3 Binary files /dev/null and b/backend/__pycache__/utils.cpython-312.pyc differ diff --git a/backend/chat/__init__.py b/backend/chat/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..00c51a25e6c0bfc2d1bc6e1261f4cf21b69fc239 --- /dev/null +++ b/backend/chat/__init__.py @@ -0,0 +1 @@ +from .chat import ChatAgentWithMemory \ No newline at end of file diff --git a/backend/chat/__pycache__/__init__.cpython-312.pyc b/backend/chat/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..69516bef55195d81a8e84e40561aa23d50af16f7 Binary files /dev/null and b/backend/chat/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/chat/__pycache__/chat.cpython-312.pyc b/backend/chat/__pycache__/chat.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..59291f07703bbd5d27539748b8ede6c69af12c70 Binary files /dev/null and b/backend/chat/__pycache__/chat.cpython-312.pyc differ diff --git a/backend/chat/chat.py b/backend/chat/chat.py new file mode 100644 index 0000000000000000000000000000000000000000..cb7a53abedbee0d3978d9503a6630887ac91cda4 --- /dev/null +++ b/backend/chat/chat.py @@ -0,0 +1,106 @@ +from fastapi import WebSocket +import uuid + +from gpt_researcher.utils.llm import get_llm +from gpt_researcher.memory import Memory +from gpt_researcher.config.config import Config + +from langgraph.prebuilt import create_react_agent +from langgraph.checkpoint.memory import MemorySaver + +from langchain_community.vectorstores import InMemoryVectorStore +from langchain.text_splitter import RecursiveCharacterTextSplitter +from langchain.tools import Tool, tool + +class ChatAgentWithMemory: + def __init__( + self, + report: str, + config_path, + headers, + vector_store = None + ): + self.report = report + self.headers = headers + self.config = Config(config_path) + self.vector_store = vector_store + self.graph = self.create_agent() + + def create_agent(self): + """Create React Agent Graph""" + cfg = Config() + + # Retrieve LLM using get_llm with settings from config + provider = get_llm( + llm_provider=cfg.smart_llm_provider, + model=cfg.smart_llm_model, + temperature=0.35, + max_tokens=cfg.smart_token_limit, + **self.config.llm_kwargs + ).llm + + # If vector_store is not initialized, process documents and add to vector_store + if not self.vector_store: + documents = self._process_document(self.report) + self.chat_config = {"configurable": {"thread_id": str(uuid.uuid4())}} + 
self.embedding = Memory( + cfg.embedding_provider, + cfg.embedding_model, + **cfg.embedding_kwargs + ).get_embeddings() + self.vector_store = InMemoryVectorStore(self.embedding) + self.vector_store.add_texts(documents) + + # Create the React Agent Graph with the configured provider + graph = create_react_agent( + provider, + tools=[self.vector_store_tool(self.vector_store)], + checkpointer=MemorySaver() + ) + + return graph + + def vector_store_tool(self, vector_store) -> Tool: + """Create Vector Store Tool""" + @tool + def retrieve_info(query): + """ + Consult the report for relevant contexts whenever you don't know something + """ + retriever = vector_store.as_retriever(k = 4) + return retriever.invoke(query) + return retrieve_info + + def _process_document(self, report): + """Split Report into Chunks""" + text_splitter = RecursiveCharacterTextSplitter( + chunk_size=1024, + chunk_overlap=20, + length_function=len, + is_separator_regex=False, + ) + documents = text_splitter.split_text(report) + return documents + + async def chat(self, message, websocket): + """Chat with React Agent""" + message = f""" + You are GPT Researcher, a autonomous research agent created by an open source community at https://github.com/assafelovic/gpt-researcher, homepage: https://gptr.dev. + To learn more about GPT Researcher you can suggest to check out: https://docs.gptr.dev. + + This is a chat message between the user and you: GPT Researcher. + The chat is about a research reports that you created. Answer based on the given context and report. + You must include citations to your answer based on the report. + + Report: {self.report} + User Message: {message} + """ + inputs = {"messages": [("user", message)]} + response = await self.graph.ainvoke(inputs, config=self.chat_config) + ai_message = response["messages"][-1].content + if websocket is not None: + await websocket.send_json({"type": "chat", "content": ai_message}) + + def get_context(self): + """return the current context of the chat""" + return self.report diff --git a/backend/memory/__init__.py b/backend/memory/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/memory/draft.py b/backend/memory/draft.py new file mode 100644 index 0000000000000000000000000000000000000000..a5e6862146d56672857e837a91b31c6b0e23b962 --- /dev/null +++ b/backend/memory/draft.py @@ -0,0 +1,10 @@ +from typing import TypedDict, List, Annotated +import operator + + +class DraftState(TypedDict): + task: dict + topic: str + draft: dict + review: str + revision_notes: str \ No newline at end of file diff --git a/backend/memory/research.py b/backend/memory/research.py new file mode 100644 index 0000000000000000000000000000000000000000..337e1f17705e8eb44e99e3fa37a390fa2e36f423 --- /dev/null +++ b/backend/memory/research.py @@ -0,0 +1,20 @@ +from typing import TypedDict, List, Annotated +import operator + + +class ResearchState(TypedDict): + task: dict + initial_research: str + sections: List[str] + research_data: List[dict] + # Report layout + title: str + headers: dict + date: str + table_of_contents: str + introduction: str + conclusion: str + sources: List[str] + report: str + + diff --git a/backend/report_type/__init__.py b/backend/report_type/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6f1e0faef859d32df2a48fbf67c99bc7b66aa1db --- /dev/null +++ b/backend/report_type/__init__.py @@ -0,0 +1,7 @@ +from .basic_report.basic_report import BasicReport +from 
.detailed_report.detailed_report import DetailedReport + +__all__ = [ + "BasicReport", + "DetailedReport" +] \ No newline at end of file diff --git a/backend/report_type/__pycache__/__init__.cpython-312.pyc b/backend/report_type/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f7c096c84c1dff1a9618079809c4243f29139634 Binary files /dev/null and b/backend/report_type/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/report_type/basic_report/__init__.py b/backend/report_type/basic_report/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/report_type/basic_report/__pycache__/__init__.cpython-312.pyc b/backend/report_type/basic_report/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..81e7cb3045a3f9ed56d2afcef731a0bdfdfbe4a4 Binary files /dev/null and b/backend/report_type/basic_report/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/report_type/basic_report/__pycache__/basic_report.cpython-312.pyc b/backend/report_type/basic_report/__pycache__/basic_report.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..80106abafb9f41bf84ec5d940b6dcaa0f23fa87d Binary files /dev/null and b/backend/report_type/basic_report/__pycache__/basic_report.cpython-312.pyc differ diff --git a/backend/report_type/basic_report/basic_report.py b/backend/report_type/basic_report/basic_report.py new file mode 100644 index 0000000000000000000000000000000000000000..fd29c732d0b6d887869984ad8da8fcf0493c5c1c --- /dev/null +++ b/backend/report_type/basic_report/basic_report.py @@ -0,0 +1,46 @@ +from fastapi import WebSocket +from typing import Any + +from gpt_researcher import GPTResearcher + + +class BasicReport: + def __init__( + self, + query: str, + report_type: str, + report_source: str, + source_urls, + document_urls, + tone: Any, + config_path: str, + websocket: WebSocket, + headers=None + ): + self.query = query + self.report_type = report_type + self.report_source = report_source + self.source_urls = source_urls + self.document_urls = document_urls + self.tone = tone + self.config_path = config_path + self.websocket = websocket + self.headers = headers or {} + + async def run(self): + # Initialize researcher + researcher = GPTResearcher( + query=self.query, + report_type=self.report_type, + report_source=self.report_source, + source_urls=self.source_urls, + document_urls=self.document_urls, + tone=self.tone, + config_path=self.config_path, + websocket=self.websocket, + headers=self.headers + ) + + await researcher.conduct_research() + report = await researcher.write_report() + return report diff --git a/backend/report_type/detailed_report/README.md b/backend/report_type/detailed_report/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1348d7a22f4df4304990e4b57e406ae16c16a660 --- /dev/null +++ b/backend/report_type/detailed_report/README.md @@ -0,0 +1,12 @@ +## Detailed Reports + +Introducing long and detailed reports, with a completely new architecture inspired by the latest [STORM](https://arxiv.org/abs/2402.14207) paper. + +In this method we do the following: + +1. Trigger Initial GPT Researcher report based on task +2. Generate subtopics from research summary +3. For each subtopic the headers of the subtopic report are extracted and accumulated +4. 
For each subtopic a report is generated making sure that any information about the headers accumulated until now are not re-generated. +5. An additional introduction section is written along with a table of contents constructed from the entire report. +6. The final report is constructed by appending these : Intro + Table of contents + Subsection reports \ No newline at end of file diff --git a/backend/report_type/detailed_report/__init__.py b/backend/report_type/detailed_report/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/report_type/detailed_report/__pycache__/__init__.cpython-312.pyc b/backend/report_type/detailed_report/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e75aba000b4992347fd191acd8f398d8007fdf2c Binary files /dev/null and b/backend/report_type/detailed_report/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/report_type/detailed_report/__pycache__/detailed_report.cpython-312.pyc b/backend/report_type/detailed_report/__pycache__/detailed_report.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7f74a5e6146f32fa3b8c1149e4b8705c6588c8a9 Binary files /dev/null and b/backend/report_type/detailed_report/__pycache__/detailed_report.cpython-312.pyc differ diff --git a/backend/report_type/detailed_report/detailed_report.py b/backend/report_type/detailed_report/detailed_report.py new file mode 100644 index 0000000000000000000000000000000000000000..290f2215b8e8f808501ff1fba84a7e94b31e1b30 --- /dev/null +++ b/backend/report_type/detailed_report/detailed_report.py @@ -0,0 +1,139 @@ +import asyncio +from typing import List, Dict, Set, Optional, Any +from fastapi import WebSocket + +from gpt_researcher import GPTResearcher + + +class DetailedReport: + def __init__( + self, + query: str, + report_type: str, + report_source: str, + source_urls: List[str] = [], + document_urls: List[str] = [], + config_path: str = None, + tone: Any = "", + websocket: WebSocket = None, + subtopics: List[Dict] = [], + headers: Optional[Dict] = None + ): + self.query = query + self.report_type = report_type + self.report_source = report_source + self.source_urls = source_urls + self.document_urls = document_urls + self.config_path = config_path + self.tone = tone + self.websocket = websocket + self.subtopics = subtopics + self.headers = headers or {} + + self.gpt_researcher = GPTResearcher( + query=self.query, + report_type="research_report", + report_source=self.report_source, + source_urls=self.source_urls, + document_urls=self.document_urls, + config_path=self.config_path, + tone=self.tone, + websocket=self.websocket, + headers=self.headers + ) + self.existing_headers: List[Dict] = [] + self.global_context: List[str] = [] + self.global_written_sections: List[str] = [] + self.global_urls: Set[str] = set( + self.source_urls) if self.source_urls else set() + + async def run(self) -> str: + await self._initial_research() + subtopics = await self._get_all_subtopics() + report_introduction = await self.gpt_researcher.write_introduction() + _, report_body = await self._generate_subtopic_reports(subtopics) + self.gpt_researcher.visited_urls.update(self.global_urls) + report = await self._construct_detailed_report(report_introduction, report_body) + return report + + async def _initial_research(self) -> None: + await self.gpt_researcher.conduct_research() + self.global_context = self.gpt_researcher.context + 
self.global_urls = self.gpt_researcher.visited_urls + + async def _get_all_subtopics(self) -> List[Dict]: + subtopics_data = await self.gpt_researcher.get_subtopics() + + all_subtopics = [] + if subtopics_data and subtopics_data.subtopics: + for subtopic in subtopics_data.subtopics: + all_subtopics.append({"task": subtopic.task}) + else: + print(f"Unexpected subtopics data format: {subtopics_data}") + + return all_subtopics + + async def _generate_subtopic_reports(self, subtopics: List[Dict]) -> tuple: + subtopic_reports = [] + subtopics_report_body = "" + + for subtopic in subtopics: + result = await self._get_subtopic_report(subtopic) + if result["report"]: + subtopic_reports.append(result) + subtopics_report_body += f"\n\n\n{result['report']}" + + return subtopic_reports, subtopics_report_body + + async def _get_subtopic_report(self, subtopic: Dict) -> Dict[str, str]: + current_subtopic_task = subtopic.get("task") + subtopic_assistant = GPTResearcher( + query=current_subtopic_task, + report_type="subtopic_report", + report_source=self.report_source, + websocket=self.websocket, + headers=self.headers, + parent_query=self.query, + subtopics=self.subtopics, + visited_urls=self.global_urls, + agent=self.gpt_researcher.agent, + role=self.gpt_researcher.role, + tone=self.tone, + ) + + subtopic_assistant.context = list(set(self.global_context)) + await subtopic_assistant.conduct_research() + + draft_section_titles = await subtopic_assistant.get_draft_section_titles(current_subtopic_task) + + if not isinstance(draft_section_titles, str): + draft_section_titles = str(draft_section_titles) + + parse_draft_section_titles = self.gpt_researcher.extract_headers(draft_section_titles) + parse_draft_section_titles_text = [header.get( + "text", "") for header in parse_draft_section_titles] + + relevant_contents = await subtopic_assistant.get_similar_written_contents_by_draft_section_titles( + current_subtopic_task, parse_draft_section_titles_text, self.global_written_sections + ) + + subtopic_report = await subtopic_assistant.write_report(self.existing_headers, relevant_contents) + + self.global_written_sections.extend(self.gpt_researcher.extract_sections(subtopic_report)) + self.global_context = list(set(subtopic_assistant.context)) + self.global_urls.update(subtopic_assistant.visited_urls) + + self.existing_headers.append({ + "subtopic task": current_subtopic_task, + "headers": self.gpt_researcher.extract_headers(subtopic_report), + }) + + return {"topic": subtopic, "report": subtopic_report} + + async def _construct_detailed_report(self, introduction: str, report_body: str) -> str: + toc = self.gpt_researcher.table_of_contents(report_body) + conclusion = await self.gpt_researcher.write_report_conclusion(report_body) + conclusion_with_references = self.gpt_researcher.add_references( + conclusion, self.gpt_researcher.visited_urls) + report = f"{introduction}\n\n{toc}\n\n{report_body}\n\n{conclusion_with_references}" + return report diff --git a/backend/server/__init__.py b/backend/server/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/server/__pycache__/__init__.cpython-312.pyc b/backend/server/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..672366d9480e7ab9c2d808a48498f8447e52ca8a Binary files /dev/null and b/backend/server/__pycache__/__init__.cpython-312.pyc differ diff --git a/backend/server/__pycache__/server.cpython-312.pyc 
b/backend/server/__pycache__/server.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..02aa29ab278e5ae5828332acfe20b437c6b99ee2 Binary files /dev/null and b/backend/server/__pycache__/server.cpython-312.pyc differ diff --git a/backend/server/__pycache__/server_utils.cpython-312.pyc b/backend/server/__pycache__/server_utils.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f78d761e038a1f84d00da4e8f6bd331d102f716f Binary files /dev/null and b/backend/server/__pycache__/server_utils.cpython-312.pyc differ diff --git a/backend/server/__pycache__/websocket_manager.cpython-312.pyc b/backend/server/__pycache__/websocket_manager.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..74fd54ea92a6cf3adc771d8fd96af3127c740704 Binary files /dev/null and b/backend/server/__pycache__/websocket_manager.cpython-312.pyc differ diff --git a/backend/server/app.py b/backend/server/app.py new file mode 100644 index 0000000000000000000000000000000000000000..e15d43a8cbd0c4a4400525c291f5a8dca96e6ab0 --- /dev/null +++ b/backend/server/app.py @@ -0,0 +1,16 @@ +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +import logging + +logger = logging.getLogger(__name__) + +app = FastAPI() + +# Add CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], # In production, replace with your frontend domain + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) \ No newline at end of file diff --git a/backend/server/logging_config.py b/backend/server/logging_config.py new file mode 100644 index 0000000000000000000000000000000000000000..c3821273bcfe71721a707e2de56dd6e5b62d8c99 --- /dev/null +++ b/backend/server/logging_config.py @@ -0,0 +1,83 @@ +import logging +import json +import os +from datetime import datetime +from pathlib import Path + +class JSONResearchHandler: + def __init__(self, json_file): + self.json_file = json_file + self.research_data = { + "timestamp": datetime.now().isoformat(), + "events": [], + "content": { + "query": "", + "sources": [], + "context": [], + "report": "", + "costs": 0.0 + } + } + + def log_event(self, event_type: str, data: dict): + self.research_data["events"].append({ + "timestamp": datetime.now().isoformat(), + "type": event_type, + "data": data + }) + self._save_json() + + def update_content(self, key: str, value): + self.research_data["content"][key] = value + self._save_json() + + def _save_json(self): + with open(self.json_file, 'w') as f: + json.dump(self.research_data, f, indent=2) + +def setup_research_logging(): + # Create logs directory if it doesn't exist + logs_dir = Path("logs") + logs_dir.mkdir(exist_ok=True) + + # Generate timestamp for log files + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + + # Create log file paths + log_file = logs_dir / f"research_{timestamp}.log" + json_file = logs_dir / f"research_{timestamp}.json" + + # Configure file handler for research logs + file_handler = logging.FileHandler(log_file) + file_handler.setLevel(logging.INFO) + file_handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')) + + # Get research logger and configure it + research_logger = logging.getLogger('research') + research_logger.setLevel(logging.INFO) + + # Remove any existing handlers to avoid duplicates + research_logger.handlers.clear() + + # Add file handler + research_logger.addHandler(file_handler) + + # Add stream handler for console output + 
console_handler = logging.StreamHandler() + console_handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')) + research_logger.addHandler(console_handler) + + # Prevent propagation to root logger to avoid duplicate logs + research_logger.propagate = False + + # Create JSON handler + json_handler = JSONResearchHandler(json_file) + + return str(log_file), str(json_file), research_logger, json_handler + +# Create a function to get the logger and JSON handler +def get_research_logger(): + return logging.getLogger('research') + +def get_json_handler(): + return getattr(logging.getLogger('research'), 'json_handler', None) \ No newline at end of file diff --git a/backend/server/server.py b/backend/server/server.py new file mode 100644 index 0000000000000000000000000000000000000000..6d2f3ee297771eba6e696dca81cb88c41ccd669d --- /dev/null +++ b/backend/server/server.py @@ -0,0 +1,134 @@ +import json +import os +from typing import Dict, List + +from fastapi import FastAPI, Request, WebSocket, WebSocketDisconnect, File, UploadFile, Header +from fastapi.middleware.cors import CORSMiddleware +from fastapi.staticfiles import StaticFiles +from fastapi.templating import Jinja2Templates +from pydantic import BaseModel + +from backend.server.websocket_manager import WebSocketManager +from backend.server.server_utils import ( + get_config_dict, + update_environment_variables, handle_file_upload, handle_file_deletion, + execute_multi_agents, handle_websocket_communication +) + + +from gpt_researcher.utils.logging_config import setup_research_logging + +import logging + +# Get logger instance +logger = logging.getLogger(__name__) + +# Don't override parent logger settings +logger.propagate = True + +logging.basicConfig( + level=logging.INFO, + format="%(asctime)s - %(levelname)s - %(message)s", + handlers=[ + logging.StreamHandler() # Only log to console + ] +) + +# Models + + +class ResearchRequest(BaseModel): + task: str + report_type: str + agent: str + + +class ConfigRequest(BaseModel): + ANTHROPIC_API_KEY: str + TAVILY_API_KEY: str + LANGCHAIN_TRACING_V2: str + LANGCHAIN_API_KEY: str + OPENAI_API_KEY: str + DOC_PATH: str + RETRIEVER: str + GOOGLE_API_KEY: str = '' + GOOGLE_CX_KEY: str = '' + BING_API_KEY: str = '' + SEARCHAPI_API_KEY: str = '' + SERPAPI_API_KEY: str = '' + SERPER_API_KEY: str = '' + SEARX_URL: str = '' + XAI_API_KEY: str + DEEPSEEK_API_KEY: str + + +# App initialization +app = FastAPI() + +# Static files and templates +app.mount("/site", StaticFiles(directory="./frontend"), name="site") +app.mount("/static", StaticFiles(directory="./frontend/static"), name="static") +templates = Jinja2Templates(directory="./frontend") + +# WebSocket manager +manager = WebSocketManager() + +# Middleware +app.add_middleware( + CORSMiddleware, + allow_origins=["http://localhost:3000"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Constants +DOC_PATH = os.getenv("DOC_PATH", "./my-docs") + +# Startup event + + +@app.on_event("startup") +def startup_event(): + os.makedirs("outputs", exist_ok=True) + app.mount("/outputs", StaticFiles(directory="outputs"), name="outputs") + os.makedirs(DOC_PATH, exist_ok=True) + + +# Routes + + +@app.get("/") +async def read_root(request: Request): + return templates.TemplateResponse("index.html", {"request": request, "report": None}) + + +@app.get("/files/") +async def list_files(): + files = os.listdir(DOC_PATH) + print(f"Files in {DOC_PATH}: {files}") + return {"files": files} + + 
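+# Kicks off the LangGraph multi-agent flow over the first active WebSocket
+# connection (see execute_multi_agents in backend/server/server_utils.py).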
+@app.post("/api/multi_agents") +async def run_multi_agents(): + return await execute_multi_agents(manager) + + +@app.post("/upload/") +async def upload_file(file: UploadFile = File(...)): + return await handle_file_upload(file, DOC_PATH) + + +@app.delete("/files/{filename}") +async def delete_file(filename: str): + return await handle_file_deletion(filename, DOC_PATH) + + +@app.websocket("/ws") +async def websocket_endpoint(websocket: WebSocket): + await manager.connect(websocket) + try: + await handle_websocket_communication(websocket, manager) + except WebSocketDisconnect: + await manager.disconnect(websocket) diff --git a/backend/server/server_utils.py b/backend/server/server_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b06eceb18ff794bc376ebd4533b98c64786b9e84 --- /dev/null +++ b/backend/server/server_utils.py @@ -0,0 +1,259 @@ +import json +import os +import re +import time +import shutil +from typing import Dict, List, Any +from fastapi.responses import JSONResponse, FileResponse +from gpt_researcher.document.document import DocumentLoader +from backend.utils import write_md_to_pdf, write_md_to_word, write_text_to_md +from pathlib import Path +from datetime import datetime +from fastapi import HTTPException +import logging + +logging.basicConfig(level=logging.DEBUG) +logger = logging.getLogger(__name__) + +class CustomLogsHandler: + """Custom handler to capture streaming logs from the research process""" + def __init__(self, websocket, task: str): + self.logs = [] + self.websocket = websocket + sanitized_filename = sanitize_filename(f"task_{int(time.time())}_{task}") + self.log_file = os.path.join("outputs", f"{sanitized_filename}.json") + self.timestamp = datetime.now().isoformat() + # Initialize log file with metadata + os.makedirs("outputs", exist_ok=True) + with open(self.log_file, 'w') as f: + json.dump({ + "timestamp": self.timestamp, + "events": [], + "content": { + "query": "", + "sources": [], + "context": [], + "report": "", + "costs": 0.0 + } + }, f, indent=2) + + async def send_json(self, data: Dict[str, Any]) -> None: + """Store log data and send to websocket""" + # Send to websocket for real-time display + if self.websocket: + await self.websocket.send_json(data) + + # Read current log file + with open(self.log_file, 'r') as f: + log_data = json.load(f) + + # Update appropriate section based on data type + if data.get('type') == 'logs': + log_data['events'].append({ + "timestamp": datetime.now().isoformat(), + "type": "event", + "data": data + }) + else: + # Update content section for other types of data + log_data['content'].update(data) + + # Save updated log file + with open(self.log_file, 'w') as f: + json.dump(log_data, f, indent=2) + logger.debug(f"Log entry written to: {self.log_file}") + + +class Researcher: + def __init__(self, query: str, report_type: str = "research_report"): + self.query = query + self.report_type = report_type + # Generate unique ID for this research task + self.research_id = f"{datetime.now().strftime('%Y%m%d_%H%M%S')}_{hash(query)}" + # Initialize logs handler with research ID + self.logs_handler = CustomLogsHandler(self.research_id) + self.researcher = GPTResearcher( + query=query, + report_type=report_type, + websocket=self.logs_handler + ) + + async def research(self) -> dict: + """Conduct research and return paths to generated files""" + await self.researcher.conduct_research() + report = await self.researcher.write_report() + + # Generate the files + sanitized_filename = 
sanitize_filename(f"task_{int(time.time())}_{self.query}") + file_paths = await generate_report_files(report, sanitized_filename) + + # Get the JSON log path that was created by CustomLogsHandler + json_relative_path = os.path.relpath(self.logs_handler.log_file) + + return { + "output": { + **file_paths, # Include PDF, DOCX, and MD paths + "json": json_relative_path + } + } + +def sanitize_filename(filename: str) -> str: + # Split into components + prefix, timestamp, *task_parts = filename.split('_') + task = '_'.join(task_parts) + + # Calculate max length for task portion + # 255 - len("outputs/") - len("task_") - len(timestamp) - len("_.json") - safety_margin + max_task_length = 255 - 8 - 5 - 10 - 6 - 10 # ~216 chars for task + + # Truncate task if needed + truncated_task = task[:max_task_length] if len(task) > max_task_length else task + + # Reassemble and clean the filename + sanitized = f"{prefix}_{timestamp}_{truncated_task}" + return re.sub(r"[^\w\s-]", "", sanitized).strip() + + +async def handle_start_command(websocket, data: str, manager): + json_data = json.loads(data[6:]) + task, report_type, source_urls, document_urls, tone, headers, report_source = extract_command_data( + json_data) + + if not task or not report_type: + print("Error: Missing task or report_type") + return + + # Create logs handler with websocket and task + logs_handler = CustomLogsHandler(websocket, task) + # Initialize log content with query + await logs_handler.send_json({ + "query": task, + "sources": [], + "context": [], + "report": "" + }) + + sanitized_filename = sanitize_filename(f"task_{int(time.time())}_{task}") + + report = await manager.start_streaming( + task, + report_type, + report_source, + source_urls, + document_urls, + tone, + websocket, + headers + ) + report = str(report) + file_paths = await generate_report_files(report, sanitized_filename) + # Add JSON log path to file_paths + file_paths["json"] = os.path.relpath(logs_handler.log_file) + await send_file_paths(websocket, file_paths) + + +async def handle_human_feedback(data: str): + feedback_data = json.loads(data[14:]) # Remove "human_feedback" prefix + print(f"Received human feedback: {feedback_data}") + # TODO: Add logic to forward the feedback to the appropriate agent or update the research state + +async def handle_chat(websocket, data: str, manager): + json_data = json.loads(data[4:]) + print(f"Received chat message: {json_data.get('message')}") + await manager.chat(json_data.get("message"), websocket) + +async def generate_report_files(report: str, filename: str) -> Dict[str, str]: + pdf_path = await write_md_to_pdf(report, filename) + docx_path = await write_md_to_word(report, filename) + md_path = await write_text_to_md(report, filename) + return {"pdf": pdf_path, "docx": docx_path, "md": md_path} + + +async def send_file_paths(websocket, file_paths: Dict[str, str]): + await websocket.send_json({"type": "path", "output": file_paths}) + + +def get_config_dict( + langchain_api_key: str, openai_api_key: str, tavily_api_key: str, + google_api_key: str, google_cx_key: str, bing_api_key: str, + searchapi_api_key: str, serpapi_api_key: str, serper_api_key: str, searx_url: str +) -> Dict[str, str]: + return { + "LANGCHAIN_API_KEY": langchain_api_key or os.getenv("LANGCHAIN_API_KEY", ""), + "OPENAI_API_KEY": openai_api_key or os.getenv("OPENAI_API_KEY", ""), + "TAVILY_API_KEY": tavily_api_key or os.getenv("TAVILY_API_KEY", ""), + "GOOGLE_API_KEY": google_api_key or os.getenv("GOOGLE_API_KEY", ""), + "GOOGLE_CX_KEY": google_cx_key or 
os.getenv("GOOGLE_CX_KEY", ""), + "BING_API_KEY": bing_api_key or os.getenv("BING_API_KEY", ""), + "SEARCHAPI_API_KEY": searchapi_api_key or os.getenv("SEARCHAPI_API_KEY", ""), + "SERPAPI_API_KEY": serpapi_api_key or os.getenv("SERPAPI_API_KEY", ""), + "SERPER_API_KEY": serper_api_key or os.getenv("SERPER_API_KEY", ""), + "SEARX_URL": searx_url or os.getenv("SEARX_URL", ""), + "LANGCHAIN_TRACING_V2": os.getenv("LANGCHAIN_TRACING_V2", "true"), + "DOC_PATH": os.getenv("DOC_PATH", "./my-docs"), + "RETRIEVER": os.getenv("RETRIEVER", ""), + "EMBEDDING_MODEL": os.getenv("OPENAI_EMBEDDING_MODEL", "") + } + + +def update_environment_variables(config: Dict[str, str]): + for key, value in config.items(): + os.environ[key] = value + + +async def handle_file_upload(file, DOC_PATH: str) -> Dict[str, str]: + file_path = os.path.join(DOC_PATH, os.path.basename(file.filename)) + with open(file_path, "wb") as buffer: + shutil.copyfileobj(file.file, buffer) + print(f"File uploaded to {file_path}") + + document_loader = DocumentLoader(DOC_PATH) + await document_loader.load() + + return {"filename": file.filename, "path": file_path} + + +async def handle_file_deletion(filename: str, DOC_PATH: str) -> JSONResponse: + file_path = os.path.join(DOC_PATH, os.path.basename(filename)) + if os.path.exists(file_path): + os.remove(file_path) + print(f"File deleted: {file_path}") + return JSONResponse(content={"message": "File deleted successfully"}) + else: + print(f"File not found: {file_path}") + return JSONResponse(status_code=404, content={"message": "File not found"}) + + +async def execute_multi_agents(manager) -> Any: + websocket = manager.active_connections[0] if manager.active_connections else None + if websocket: + report = await run_research_task("Is AI in a hype cycle?", websocket, stream_output) + return {"report": report} + else: + return JSONResponse(status_code=400, content={"message": "No active WebSocket connection"}) + + +async def handle_websocket_communication(websocket, manager): + while True: + data = await websocket.receive_text() + if data.startswith("start"): + await handle_start_command(websocket, data, manager) + elif data.startswith("human_feedback"): + await handle_human_feedback(data) + elif data.startswith("chat"): + await handle_chat(websocket, data, manager) + else: + print("Error: Unknown command or not enough parameters provided.") + + +def extract_command_data(json_data: Dict) -> tuple: + return ( + json_data.get("task"), + json_data.get("report_type"), + json_data.get("source_urls"), + json_data.get("document_urls"), + json_data.get("tone"), + json_data.get("headers", {}), + json_data.get("report_source") + ) diff --git a/backend/server/websocket_manager.py b/backend/server/websocket_manager.py new file mode 100644 index 0000000000000000000000000000000000000000..8a9b4c6e4844173aab30785297b8bf5b3db2a048 --- /dev/null +++ b/backend/server/websocket_manager.py @@ -0,0 +1,125 @@ +import asyncio +import datetime +from typing import Dict, List + +from fastapi import WebSocket + +from backend.report_type import BasicReport, DetailedReport +from backend.chat import ChatAgentWithMemory + +from gpt_researcher.utils.enum import ReportType, Tone +from multi_agents.main import run_research_task +from gpt_researcher.actions import stream_output # Import stream_output +from backend.server.server_utils import CustomLogsHandler + + +class WebSocketManager: + """Manage websockets""" + + def __init__(self): + """Initialize the WebSocketManager class.""" + self.active_connections: List[WebSocket] = [] 
+ self.sender_tasks: Dict[WebSocket, asyncio.Task] = {} + self.message_queues: Dict[WebSocket, asyncio.Queue] = {} + self.chat_agent = None + + async def start_sender(self, websocket: WebSocket): + """Start the sender task.""" + queue = self.message_queues.get(websocket) + if not queue: + return + + while True: + message = await queue.get() + if websocket in self.active_connections: + try: + if message == "ping": + await websocket.send_text("pong") + else: + await websocket.send_text(message) + except: + break + else: + break + + async def connect(self, websocket: WebSocket): + """Connect a websocket.""" + await websocket.accept() + self.active_connections.append(websocket) + self.message_queues[websocket] = asyncio.Queue() + self.sender_tasks[websocket] = asyncio.create_task( + self.start_sender(websocket)) + + async def disconnect(self, websocket: WebSocket): + """Disconnect a websocket.""" + if websocket in self.active_connections: + self.active_connections.remove(websocket) + self.sender_tasks[websocket].cancel() + await self.message_queues[websocket].put(None) + del self.sender_tasks[websocket] + del self.message_queues[websocket] + + async def start_streaming(self, task, report_type, report_source, source_urls, document_urls, tone, websocket, headers=None): + """Start streaming the output.""" + tone = Tone[tone] + # add customized JSON config file path here + config_path = "default" + report = await run_agent(task, report_type, report_source, source_urls, document_urls, tone, websocket, headers = headers, config_path = config_path) + #Create new Chat Agent whenever a new report is written + self.chat_agent = ChatAgentWithMemory(report, config_path, headers) + return report + + async def chat(self, message, websocket): + """Chat with the agent based message diff""" + if self.chat_agent: + await self.chat_agent.chat(message, websocket) + else: + await websocket.send_json({"type": "chat", "content": "Knowledge empty, please run the research first to obtain knowledge"}) + +async def run_agent(task, report_type, report_source, source_urls, document_urls, tone: Tone, websocket, headers=None, config_path=""): + """Run the agent.""" + start_time = datetime.datetime.now() + + # Create logs handler for this research task + logs_handler = CustomLogsHandler(websocket, task) + + # Initialize researcher based on report type + if report_type == "multi_agents": + report = await run_research_task( + query=task, + websocket=logs_handler, # Use logs_handler instead of raw websocket + stream_output=stream_output, + tone=tone, + headers=headers + ) + report = report.get("report", "") + + elif report_type == ReportType.DetailedReport.value: + researcher = DetailedReport( + query=task, + report_type=report_type, + report_source=report_source, + source_urls=source_urls, + document_urls=document_urls, + tone=tone, + config_path=config_path, + websocket=logs_handler, # Use logs_handler instead of raw websocket + headers=headers + ) + report = await researcher.run() + + else: + researcher = BasicReport( + query=task, + report_type=report_type, + report_source=report_source, + source_urls=source_urls, + document_urls=document_urls, + tone=tone, + config_path=config_path, + websocket=logs_handler, # Use logs_handler instead of raw websocket + headers=headers + ) + report = await researcher.run() + + return report diff --git a/backend/utils.py b/backend/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..5870747fdcdeb25b83937581f8a4a77e3d972b6a --- /dev/null +++ b/backend/utils.py @@ 
-0,0 +1,92 @@ +import aiofiles +import urllib +import mistune + +async def write_to_file(filename: str, text: str) -> None: + """Asynchronously write text to a file in UTF-8 encoding. + + Args: + filename (str): The filename to write to. + text (str): The text to write. + """ + # Ensure text is a string + if not isinstance(text, str): + text = str(text) + + # Convert text to UTF-8, replacing any problematic characters + text_utf8 = text.encode('utf-8', errors='replace').decode('utf-8') + + async with aiofiles.open(filename, "w", encoding='utf-8') as file: + await file.write(text_utf8) + +async def write_text_to_md(text: str, filename: str = "") -> str: + """Writes text to a Markdown file and returns the file path. + + Args: + text (str): Text to write to the Markdown file. + + Returns: + str: The file path of the generated Markdown file. + """ + file_path = f"outputs/{filename[:60]}.md" + await write_to_file(file_path, text) + return urllib.parse.quote(file_path) + +async def write_md_to_pdf(text: str, filename: str = "") -> str: + """Converts Markdown text to a PDF file and returns the file path. + + Args: + text (str): Markdown text to convert. + + Returns: + str: The encoded file path of the generated PDF. + """ + file_path = f"outputs/{filename[:60]}.pdf" + + try: + from md2pdf.core import md2pdf + md2pdf(file_path, + md_content=text, + # md_file_path=f"{file_path}.md", + css_file_path="./frontend/pdf_styles.css", + base_url=None) + print(f"Report written to {file_path}") + except Exception as e: + print(f"Error in converting Markdown to PDF: {e}") + return "" + + encoded_file_path = urllib.parse.quote(file_path) + return encoded_file_path + +async def write_md_to_word(text: str, filename: str = "") -> str: + """Converts Markdown text to a DOCX file and returns the file path. + + Args: + text (str): Markdown text to convert. + + Returns: + str: The encoded file path of the generated DOCX. + """ + file_path = f"outputs/{filename[:60]}.docx" + + try: + from docx import Document + from htmldocx import HtmlToDocx + # Convert report markdown to HTML + html = mistune.html(text) + # Create a document object + doc = Document() + # Convert the html generated from the report to document format + HtmlToDocx().add_html_to_document(html, doc) + + # Saving the docx document to file_path + doc.save(file_path) + + print(f"Report written to {file_path}") + + encoded_file_path = urllib.parse.quote(file_path) + return encoded_file_path + + except Exception as e: + print(f"Error in converting Markdown to DOCX: {e}") + return "" \ No newline at end of file diff --git a/citation.cff b/citation.cff new file mode 100644 index 0000000000000000000000000000000000000000..f87c4afe9a3b837df9173109532ba5f5820814e8 --- /dev/null +++ b/citation.cff @@ -0,0 +1,10 @@ +cff-version: 1.0.0 +message: "If you use this software, please cite it as below." +authors: + - family-names: Elovic + given-names: Assaf +title: gpt-researcher +version: 0.5.4 +date-released: 2023-07-23 +repository-code: https://github.com/assafelovic/gpt-researcher +url: https://gptr.dev \ No newline at end of file diff --git a/cli.py b/cli.py new file mode 100644 index 0000000000000000000000000000000000000000..8543e38ebabd953b59fbcb1533152451832b85d0 --- /dev/null +++ b/cli.py @@ -0,0 +1,139 @@ +""" +Provides a command line interface for the GPTResearcher class. 
+ +Usage: + +```shell +python cli.py "" --report_type +``` + +""" +import asyncio +import argparse +from argparse import RawTextHelpFormatter +from uuid import uuid4 +import os + +from dotenv import load_dotenv + +from gpt_researcher import GPTResearcher +from gpt_researcher.utils.enum import ReportType, Tone +from backend.report_type import DetailedReport + +# ============================================================================= +# CLI +# ============================================================================= + +cli = argparse.ArgumentParser( + description="Generate a research report.", + # Enables the use of newlines in the help message + formatter_class=RawTextHelpFormatter) + +# ===================================== +# Arg: Query +# ===================================== + +cli.add_argument( + # Position 0 argument + "query", + type=str, + help="The query to conduct research on.") + +# ===================================== +# Arg: Report Type +# ===================================== + +choices = [report_type.value for report_type in ReportType] + +report_type_descriptions = { + ReportType.ResearchReport.value: "Summary - Short and fast (~2 min)", + ReportType.DetailedReport.value: "Detailed - In depth and longer (~5 min)", + ReportType.ResourceReport.value: "", + ReportType.OutlineReport.value: "", + ReportType.CustomReport.value: "", + ReportType.SubtopicReport.value: "" +} + +cli.add_argument( + "--report_type", + type=str, + help="The type of report to generate. Options:\n" + "\n".join( + f" {choice}: {report_type_descriptions[choice]}" for choice in choices + ), + # Restrict choices to the ReportType enum values: + choices=choices, + required=True) + +cli.add_argument( + "--tone", + type=str, + help="The tone of the report (optional).", + choices=["objective", "formal", "analytical", "persuasive", "informative", + "explanatory", "descriptive", "critical", "comparative", "speculative", + "reflective", "narrative", "humorous", "optimistic", "pessimistic"], + default="objective" +) + +# ============================================================================= +# Main +# ============================================================================= + + +async def main(args): + """ + Conduct research on the given query, generate the report, and write + it as a markdown file to the output directory.
+ """ + if args.report_type == 'detailed_report': + detailed_report = DetailedReport( + query=args.query, + report_type="research_report", + report_source="web_search", + ) + + report = await detailed_report.run() + else: + # Convert the simple keyword to the full Tone enum value + tone_map = { + "objective": Tone.Objective, + "formal": Tone.Formal, + "analytical": Tone.Analytical, + "persuasive": Tone.Persuasive, + "informative": Tone.Informative, + "explanatory": Tone.Explanatory, + "descriptive": Tone.Descriptive, + "critical": Tone.Critical, + "comparative": Tone.Comparative, + "speculative": Tone.Speculative, + "reflective": Tone.Reflective, + "narrative": Tone.Narrative, + "humorous": Tone.Humorous, + "optimistic": Tone.Optimistic, + "pessimistic": Tone.Pessimistic + } + + researcher = GPTResearcher( + query=args.query, + report_type=args.report_type, + tone=tone_map[args.tone] + ) + + await researcher.conduct_research() + + report = await researcher.write_report() + + # Write the report to a file + artifact_filepath = f"outputs/{uuid4()}.md" + os.makedirs("outputs", exist_ok=True) + with open(artifact_filepath, "w") as f: + f.write(report) + + print(f"Report written to '{artifact_filepath}'") + +if __name__ == "__main__": + load_dotenv() + args = cli.parse_args() + asyncio.run(main(args)) diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000000000000000000000000000000000000..62a416c8645d756bddfe5d4e52a1b3ac0892c2a1 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,48 @@ +services: + gpt-researcher: + pull_policy: build + image: gptresearcher/gpt-researcher + build: ./ + environment: + OPENAI_API_KEY: ${OPENAI_API_KEY} + TAVILY_API_KEY: ${TAVILY_API_KEY} + LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY} + LOGGING_LEVEL: INFO + volumes: + - ./outputs:/usr/src/app/outputs + restart: always + ports: + - 8000:8000 + gptr-nextjs: + pull_policy: build + image: gptresearcher/gptr-nextjs + stdin_open: true + environment: + CHOKIDAR_USEPOLLING: true + LOGGING_LEVEL: INFO + build: + dockerfile: Dockerfile.dev + context: frontend/nextjs + volumes: + - /app/node_modules + - ./frontend/nextjs:/app + - ./outputs:/app/outputs + restart: always + ports: + - 3000:3000 + + gpt-researcher-tests: + image: gptresearcher/gpt-researcher-tests + build: ./ + environment: + OPENAI_API_KEY: ${OPENAI_API_KEY} + TAVILY_API_KEY: ${TAVILY_API_KEY} + LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY} + LOGGING_LEVEL: INFO + profiles: ["test"] + command: > + /bin/sh -c " + pip install pytest pytest-asyncio faiss-cpu && + python -m pytest tests/report-types.py && + python -m pytest tests/vector-store.py + " diff --git a/docs/CNAME b/docs/CNAME new file mode 100644 index 0000000000000000000000000000000000000000..c5661873f63f93d752bab52acbd9296443f05926 --- /dev/null +++ b/docs/CNAME @@ -0,0 +1 @@ +docs.gptr.dev \ No newline at end of file diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000000000000000000000000000000000000..6cb2e952509f7258ad64bf2c7bfcfeb321d7a793 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,31 @@ +# Website + +This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator. + +## Prerequisites + +To build and test documentation locally, begin by downloading and installing [Node.js](https://nodejs.org/en/download/), and then installing [Yarn](https://classic.yarnpkg.com/en/). 
+On Windows, you can install via the npm package manager (npm) which comes bundled with Node.js: + +```console +npm install --global yarn +``` + +## Installation + +```console +pip install pydoc-markdown +cd website +yarn install +``` + +## Local Development + +Navigate to the website folder and run: + +```console +pydoc-markdown +yarn start +``` + +This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server. diff --git a/docs/babel.config.js b/docs/babel.config.js new file mode 100644 index 0000000000000000000000000000000000000000..92d391e31ebcc2f53ea66a6f2d2b1ec4737c11b7 --- /dev/null +++ b/docs/babel.config.js @@ -0,0 +1,3 @@ +module.exports = { + presets: [require.resolve('@docusaurus/core/lib/babel/preset')], +}; diff --git a/docs/blog/2023-09-22-gpt-researcher/architecture.png b/docs/blog/2023-09-22-gpt-researcher/architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..0ad8847db3f74a4e9b792d8221ea7d4a9e6399fc --- /dev/null +++ b/docs/blog/2023-09-22-gpt-researcher/architecture.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93f7c083722105b00dc714d372a1075f4d5770b46fa19551dc2b772738f82d89 +size 143143 diff --git a/docs/blog/2023-09-22-gpt-researcher/index.md b/docs/blog/2023-09-22-gpt-researcher/index.md new file mode 100644 index 0000000000000000000000000000000000000000..ebd004ec6f6e2fa442e6940ae36457131dec3d36 --- /dev/null +++ b/docs/blog/2023-09-22-gpt-researcher/index.md @@ -0,0 +1,88 @@ +--- +slug: building-gpt-researcher +title: How we built GPT Researcher +authors: [assafe] +tags: [gpt-researcher, autonomous-agent, opensource, github] +--- + +After [AutoGPT](https://github.com/Significant-Gravitas/AutoGPT) was published, we immediately took it for a spin. The first use case that came to mind was autonomous online research. Forming objective conclusions for manual research tasks can take time, sometimes weeks, to find the right resources and information. Seeing how well AutoGPT created tasks and executed them got me thinking about the great potential of using AI to conduct comprehensive research and what it meant for the future of online research. + +But the problem with AutoGPT was that it usually ran into never-ending loops, required human interference for almost every step, constantly lost track of its progress, and almost never actually completed the task. + +Nonetheless, the information and context gathered during the research task were lost (such as keeping track of sources), and sometimes hallucinated. + +The passion for leveraging AI for online research and the limitations I found put me on a mission to try and solve it while sharing my work with the world. This is when I created [GPT Researcher](https://github.com/assafelovic/gpt-researcher) — an open source autonomous agent for online comprehensive research. + +In this article, we will share the steps that guided me toward the proposed solution. + +### Moving from infinite loops to deterministic results +The first step in solving these issues was to seek a more deterministic solution that could ultimately guarantee completing any research task within a fixed time frame, without human interference. + +This is when we stumbled upon the recent paper [Plan and Solve](https://arxiv.org/abs/2305.04091). The paper aims to provide a better solution for the challenges stated above. 
The idea is quite simple and consists of two components: first, devising a plan to divide the entire task into smaller subtasks and then carrying out the subtasks according to the plan. + +![Planner-Executor Model](./planner.jpeg) + +As it relates to research, first create an outline of questions to research related to the task, and then deterministically execute an agent for every outline item. This approach eliminates the uncertainty in task completion by breaking the agent steps into a deterministic finite set of tasks. Once all tasks are completed, the agent concludes the research. + +Following this strategy has improved the reliability of completing research tasks to 100%. Now the challenge is: how do we improve quality and speed? + +### Aiming for objective and unbiased results +The biggest challenge with LLMs is the lack of factuality and unbiased responses caused by hallucinations and out-of-date training sets (GPT is currently trained on datasets from 2021). But the irony is that for research tasks, it is crucial to optimize for these exact two criteria: factuality and bias. + +To tackle these challenges, we assumed the following: + +- Law of large numbers — More content will lead to less biased results. Especially if gathered properly. +- Leveraging LLMs for the summarization of factual information can significantly improve the overall factuality of the results. + +After experimenting with LLMs for quite some time, we can say that the areas where foundation models excel are in the summarization and rewriting of given content. So, in theory, if LLMs only review given content and summarize and rewrite it, potentially it would reduce hallucinations significantly. + +In addition, assuming the given content is unbiased, or at least holds opinions and information from all sides of a topic, the rewritten result would also be unbiased. So how can content be unbiased? The [law of large numbers](https://en.wikipedia.org/wiki/Law_of_large_numbers). In other words, if enough sites that hold relevant information are scraped, the possibility of biased information reduces greatly. So the idea would be to scrape just enough sites together to form an objective opinion on any topic. + +Great! Sounds like, for now, we have an idea for how to create deterministic, factual, and unbiased results. But what about the speed problem? + +### Speeding up the research process +Another issue with AutoGPT is that it works synchronously. Its main idea is to create a list of tasks and then execute them one by one. So if, let’s say, a research task requires visiting 20 sites, and each site takes around one minute to scrape and summarize, the overall research task would take a minimum of +20 minutes. That’s assuming it ever stops. But what if we could parallelize agent work? + +By leveraging Python libraries such as asyncio, the agent tasks have been optimized to work in parallel, thus significantly reducing the time to research. + +```python +# Create a list to hold the coroutine agent tasks +tasks = [async_browse(url, query, self.websocket) for url in await new_search_urls] + +# Gather the results as they become available +responses = await asyncio.gather(*tasks, return_exceptions=True) +``` + +In the example above, we trigger scraping for all URLs in parallel, and only once they are all done do we continue with the task. Based on many tests, an average research task takes around three minutes (!!). That’s 85% faster than AutoGPT.
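To tie the two ideas together, here is a minimal, self-contained sketch of the plan-and-execute loop described above: generate a finite outline of research questions, then run an agent for each one in parallel. The helper coroutines are illustrative stand-ins rather than GPT Researcher's actual internals.

```python
import asyncio

async def generate_outline_questions(task: str) -> list[str]:
    # Stand-in for the planning step: in practice, an LLM call that
    # produces a finite outline of research questions for the task.
    return [f"{task} (question {i})" for i in range(1, 4)]

async def research_question(question: str) -> str:
    # Stand-in for a single agent run: scrape, filter and summarize
    # sources relevant to one outline question.
    await asyncio.sleep(0.1)
    return f"Summary for: {question}"

async def run_research(task: str) -> list[str]:
    # Plan first: a deterministic, finite set of subtasks...
    questions = await generate_outline_questions(task)
    # ...then execute every subtask in parallel and gather the results.
    summaries = await asyncio.gather(*(research_question(q) for q in questions))
    return list(summaries)

if __name__ == "__main__":
    print(asyncio.run(run_research("Is AI in a hype cycle?")))
```

Swap the two stand-ins for real planning and browsing agents and you get the same deterministic, parallel behavior described in this post.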
+ +### Finalizing the research report +Finally, after aggregating as much information as possible about a given research task, the challenge is to write a comprehensive report about it. + +After experimenting with several OpenAI models and even open source, I’ve concluded that the best results are currently achieved with GPT-4. The task is straightforward — provide GPT-4 as context with all the aggregated information, and ask it to write a detailed report about it given the original research task. + +The prompt is as follows: +```commandline +"{research_summary}" Using the above information, answer the following question or topic: "{question}" in a detailed report — The report should focus on the answer to the question, should be well structured, informative, in depth, with facts and numbers if available, a minimum of 1,200 words and with markdown syntax and apa format. Write all source urls at the end of the report in apa format. You should write your report only based on the given information and nothing else. +``` + +The results are quite impressive, with some minor hallucinations in very few samples, but it’s fair to assume that as GPT improves over time, results will only get better. + +### The final architecture +Now that we’ve reviewed the necessary steps of GPT Researcher, let’s break down the final architecture, as shown below: + +
+ +![GPT Researcher final architecture](./architecture.png)
+ +More specifically: +- Generate an outline of research questions that form an objective opinion on any given task. +- For each research question, trigger a crawler agent that scrapes online resources for information relevant to the given task. +- For each scraped resource, keep track, filter, and summarize only if it includes relevant information. +- Finally, aggregate all summarized sources and generate a final research report. + +### Going forward +The future of online research automation is heading toward a major disruption. As AI continues to improve, it is only a matter of time before AI agents can perform comprehensive research tasks for any of our day-to-day needs. AI research can disrupt areas of finance, legal, academia, health, and retail, reducing our time for each research by 95% while optimizing for factual and unbiased reports within an influx and overload of ever-growing online information. + +Imagine if an AI can eventually understand and analyze any form of online content — videos, images, graphs, tables, reviews, text, audio. And imagine if it could support and analyze hundreds of thousands of words of aggregated information within a single prompt. Even imagine that AI can eventually improve in reasoning and analysis, making it much more suitable for reaching new and innovative research conclusions. And that it can do all that in minutes, if not seconds. + +It’s all a matter of time and what [GPT Researcher](https://github.com/assafelovic/gpt-researcher) is all about. diff --git a/docs/blog/2023-09-22-gpt-researcher/planner.jpeg b/docs/blog/2023-09-22-gpt-researcher/planner.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..2a5d3892ed06e95753562ff652173b7a9bd85163 --- /dev/null +++ b/docs/blog/2023-09-22-gpt-researcher/planner.jpeg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8686560147e72d08dbdf97fd16fe2934d1621a08c881ceb6bf8fd3652f81a31b +size 135677 diff --git a/docs/blog/2023-11-12-openai-assistant/diagram-1.png b/docs/blog/2023-11-12-openai-assistant/diagram-1.png new file mode 100644 index 0000000000000000000000000000000000000000..31a46a5f9bcafed1360350e1a2a855baac0582f2 Binary files /dev/null and b/docs/blog/2023-11-12-openai-assistant/diagram-1.png differ diff --git a/docs/blog/2023-11-12-openai-assistant/diagram-assistant.jpeg b/docs/blog/2023-11-12-openai-assistant/diagram-assistant.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..f467f5bc75ef46b81a6d4cf804cbd2eb227872f6 Binary files /dev/null and b/docs/blog/2023-11-12-openai-assistant/diagram-assistant.jpeg differ diff --git a/docs/blog/2023-11-12-openai-assistant/index.md b/docs/blog/2023-11-12-openai-assistant/index.md new file mode 100644 index 0000000000000000000000000000000000000000..85d48ccda303b3a0b4adbf4e38373b42948ba251 --- /dev/null +++ b/docs/blog/2023-11-12-openai-assistant/index.md @@ -0,0 +1,259 @@ +--- +slug: building-openai-assistant +title: How to build an OpenAI Assistant with Internet access +authors: [assafe] +tags: [tavily, search-api, openai, assistant-api] +--- + +OpenAI has done it again with a [groundbreaking DevDay](https://openai.com/blog/new-models-and-developer-products-announced-at-devday) showcasing some of the latest improvements to the OpenAI suite of tools, products and services. One major release was the new [Assistants API](https://platform.openai.com/docs/assistants/overview) that makes it easier for developers to build their own assistive AI apps that have goals and can call models and tools. 
+ +The new Assistants API currently supports three types of tools: Code Interpreter, Retrieval, and Function calling. Although you might expect the Retrieval tool to support online information retrieval (such as search APIs or as ChatGPT plugins), it only supports raw data for now such as text or CSV files. + +This blog will demonstrate how to leverage the latest Assistants API with online information using the function calling tool. + +To skip the tutorial below, feel free to check out the full [Github Gist here](https://gist.github.com/assafelovic/579822cd42d52d80db1e1c1ff82ffffd). + +At a high level, a typical integration of the Assistants API has the following steps: + +- Create an [Assistant](https://platform.openai.com/docs/api-reference/assistants/createAssistant) in the API by defining its custom instructions and picking a model. If helpful, enable tools like Code Interpreter, Retrieval, and Function calling. +- Create a [Thread](https://platform.openai.com/docs/api-reference/threads) when a user starts a conversation. +- Add [Messages](https://platform.openai.com/docs/api-reference/messages) to the Thread as the user ask questions. +- [Run](https://platform.openai.com/docs/api-reference/runs) the Assistant on the Thread to trigger responses. This automatically calls the relevant tools. + +As you can see below, an Assistant object includes Threads for storing and handling conversation sessions between the assistant and users, and Run for invocation of an Assistant on a Thread. + +![OpenAI Assistant Object](./diagram-assistant.jpeg) + +Let’s go ahead and implement these steps one by one! For the example, we will build a finance GPT that can provide insights about financial questions. We will use the [OpenAI Python SDK v1.2](https://github.com/openai/openai-python/tree/main#installation) and [Tavily Search API](https://tavily.com). + +First things first, let’s define the assistant’s instructions: + +```python +assistant_prompt_instruction = """You are a finance expert. +Your goal is to provide answers based on information from the internet. +You must use the provided Tavily search API function to find relevant online information. +You should never use your own knowledge to answer questions. +Please include relevant url sources in the end of your answers. +""" +``` +Next, let’s finalize step 1 and create an assistant using the latest [GPT-4 Turbo model](https://github.com/openai/openai-python/tree/main#installation) (128K context), and the call function using the [Tavily web search API](https://tavily.com/): + +```python +# Create an assistant +assistant = client.beta.assistants.create( + instructions=assistant_prompt_instruction, + model="gpt-4-1106-preview", + tools=[{ + "type": "function", + "function": { + "name": "tavily_search", + "description": "Get information on recent events from the web.", + "parameters": { + "type": "object", + "properties": { + "query": {"type": "string", "description": "The search query to use. 
For example: 'Latest news on Nvidia stock performance'"}, + }, + "required": ["query"] + } + } + }] +) +``` + +Step 2+3 are quite straight forward, we’ll initiate a new thread and update it with a user message: + +```python +thread = client.beta.threads.create() +user_input = input("You: ") +message = client.beta.threads.messages.create( + thread_id=thread.id, + role="user", + content=user_input, +) +``` + +Finally, we’ll run the assistant on the thread to trigger the function call and get the response: + +```python +run = client.beta.threads.runs.create( + thread_id=thread.id, + assistant_id=assistant_id, +) +``` + +So far so good! But this is where it gets a bit messy. Unlike with the regular GPT APIs, the Assistants API doesn’t return a synchronous response, but returns a status. This allows for asynchronous operations across assistants, but requires more overhead for fetching statuses and dealing with each manually. + +![Status Diagram](./diagram-1.png) + +To manage this status lifecycle, let’s build a function that can be reused and handles waiting for various statuses (such as ‘requires_action’): + +```python +# Function to wait for a run to complete +def wait_for_run_completion(thread_id, run_id): + while True: + time.sleep(1) + run = client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run_id) + print(f"Current run status: {run.status}") + if run.status in ['completed', 'failed', 'requires_action']: + return run +``` + +This function will sleep as long as the run has not been finalized such as in cases where it’s completed or requires an action from a function call. + +We’re almost there! Lastly, let’s take care of when the assistant wants to call the web search API: + +```python +# Function to handle tool output submission +def submit_tool_outputs(thread_id, run_id, tools_to_call): + tool_output_array = [] + for tool in tools_to_call: + output = None + tool_call_id = tool.id + function_name = tool.function.name + function_args = tool.function.arguments + + if function_name == "tavily_search": + output = tavily_search(query=json.loads(function_args)["query"]) + + if output: + tool_output_array.append({"tool_call_id": tool_call_id, "output": output}) + + return client.beta.threads.runs.submit_tool_outputs( + thread_id=thread_id, + run_id=run_id, + tool_outputs=tool_output_array + ) +``` + +As seen above, if the assistant has reasoned that a function call should trigger, we extract the given required function params and pass back to the runnable thread. We catch this status and call our functions as seen below: + +```python +if run.status == 'requires_action': + run = submit_tool_outputs(thread.id, run.id, run.required_action.submit_tool_outputs.tool_calls) + run = wait_for_run_completion(thread.id, run.id) +``` + +That’s it! We now have a working OpenAI Assistant that can be used to answer financial questions using real time online information. Below is the full runnable code: + +```python +import os +import json +import time +from openai import OpenAI +from tavily import TavilyClient + +# Initialize clients with API keys +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"]) +tavily_client = TavilyClient(api_key=os.environ["TAVILY_API_KEY"]) + +assistant_prompt_instruction = """You are a finance expert. +Your goal is to provide answers based on information from the internet. +You must use the provided Tavily search API function to find relevant online information. +You should never use your own knowledge to answer questions. 
+Please include relevant url sources in the end of your answers. +""" + +# Function to perform a Tavily search +def tavily_search(query): + search_result = tavily_client.get_search_context(query, search_depth="advanced", max_tokens=8000) + return search_result + +# Function to wait for a run to complete +def wait_for_run_completion(thread_id, run_id): + while True: + time.sleep(1) + run = client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run_id) + print(f"Current run status: {run.status}") + if run.status in ['completed', 'failed', 'requires_action']: + return run + +# Function to handle tool output submission +def submit_tool_outputs(thread_id, run_id, tools_to_call): + tool_output_array = [] + for tool in tools_to_call: + output = None + tool_call_id = tool.id + function_name = tool.function.name + function_args = tool.function.arguments + + if function_name == "tavily_search": + output = tavily_search(query=json.loads(function_args)["query"]) + + if output: + tool_output_array.append({"tool_call_id": tool_call_id, "output": output}) + + return client.beta.threads.runs.submit_tool_outputs( + thread_id=thread_id, + run_id=run_id, + tool_outputs=tool_output_array + ) + +# Function to print messages from a thread +def print_messages_from_thread(thread_id): + messages = client.beta.threads.messages.list(thread_id=thread_id) + for msg in messages: + print(f"{msg.role}: {msg.content[0].text.value}") + +# Create an assistant +assistant = client.beta.assistants.create( + instructions=assistant_prompt_instruction, + model="gpt-4-1106-preview", + tools=[{ + "type": "function", + "function": { + "name": "tavily_search", + "description": "Get information on recent events from the web.", + "parameters": { + "type": "object", + "properties": { + "query": {"type": "string", "description": "The search query to use. For example: 'Latest news on Nvidia stock performance'"}, + }, + "required": ["query"] + } + } + }] +) +assistant_id = assistant.id +print(f"Assistant ID: {assistant_id}") + +# Create a thread +thread = client.beta.threads.create() +print(f"Thread: {thread}") + +# Ongoing conversation loop +while True: + user_input = input("You: ") + if user_input.lower() == 'exit': + break + + # Create a message + message = client.beta.threads.messages.create( + thread_id=thread.id, + role="user", + content=user_input, + ) + + # Create a run + run = client.beta.threads.runs.create( + thread_id=thread.id, + assistant_id=assistant_id, + ) + print(f"Run ID: {run.id}") + + # Wait for run to complete + run = wait_for_run_completion(thread.id, run.id) + + if run.status == 'failed': + print(run.error) + continue + elif run.status == 'requires_action': + run = submit_tool_outputs(thread.id, run.id, run.required_action.submit_tool_outputs.tool_calls) + run = wait_for_run_completion(thread.id, run.id) + + # Print messages from the thread + print_messages_from_thread(thread.id) +``` + +The assistant can be further customized and improved using additional retrieval information, OpenAI’s coding interpreter and more. Also, you can go ahead and add more function tools to make the assistant even smarter. + +Feel free to drop a comment below if you have any further questions! 
diff --git a/docs/blog/2024-05-19-gptr-langgraph/architecture.jpeg b/docs/blog/2024-05-19-gptr-langgraph/architecture.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..76f70d482d3bd064d715c5f9133036ba4f5e3d31 Binary files /dev/null and b/docs/blog/2024-05-19-gptr-langgraph/architecture.jpeg differ diff --git a/docs/blog/2024-05-19-gptr-langgraph/blog-langgraph.jpeg b/docs/blog/2024-05-19-gptr-langgraph/blog-langgraph.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..e07f5f8b627824237297f1e3142268e634263c07 --- /dev/null +++ b/docs/blog/2024-05-19-gptr-langgraph/blog-langgraph.jpeg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58f12de290d61f39177ef2e9c6f284f7153f6860d2e97136036ff0b94df3b2ad +size 391989 diff --git a/docs/blog/2024-05-19-gptr-langgraph/index.md b/docs/blog/2024-05-19-gptr-langgraph/index.md new file mode 100644 index 0000000000000000000000000000000000000000..aa29e196fab96ab9f773000aae4ddeed3f8a1276 --- /dev/null +++ b/docs/blog/2024-05-19-gptr-langgraph/index.md @@ -0,0 +1,223 @@ +--- +slug: gptr-langgraph +title: How to Build the Ultimate Research Multi-Agent Assistant +authors: [assafe] +tags: [multi-skills, gpt-researcher, langchain, langgraph] +--- +![Header](./blog-langgraph.jpeg) +# Introducing the GPT Researcher Multi-Agent Assistant +### Learn how to build an autonomous research assistant using LangGraph with a team of specialized AI agents + +It has only been a year since the initial release of GPT Researcher, but methods for building, testing, and deploying AI agents have already evolved significantly. That’s just the nature and speed of the current AI progress. What started as simple zero-shot or few-shot prompting, has quickly evolved to agent function calling, RAG and now finally agentic workflows (aka “flow engineering”). + +Andrew Ng has [recently stated](https://www.deeplearning.ai/the-batch/how-agents-can-improve-llm-performance/), “I think AI agent workflows will drive massive AI progress this year — perhaps even more than the next generation of foundation models. This is an important trend, and I urge everyone who works in AI to pay attention to it.” + +In this article you will learn why multi-agent workflows are the current best standard and how to build the optimal autonomous research multi-agent assistant using LangGraph. + +To skip this tutorial, feel free to check out the Github repo of [GPT Researcher x LangGraph](https://github.com/assafelovic/gpt-researcher/tree/master/multi_agents). + +## Introducing LangGraph +LangGraph is an extension of LangChain aimed at creating agent and multi-agent flows. It adds in the ability to create cyclical flows and comes with memory built in — both important attributes for creating agents. + +LangGraph provides developers with a high degree of controllability and is important for creating custom agents and flows. Nearly all agents in production are customized towards the specific use case they are trying solve. LangGraph gives you the flexibility to create arbitrary customized agents, while providing an intuitive developer experience for doing so. + +Enough with the smalltalk, let’s start building! + +## Building the Ultimate Autonomous Research Agent +By leveraging LangGraph, the research process can be significantly improved in depth and quality by leveraging multiple agents with specialized skills. 
Having every agent focus on and specialize in only a specific skill allows for better separation of concerns, customizability, and further development at scale as the project grows. + +Inspired by the recent STORM paper, this example showcases how a team of AI agents can work together to conduct research on a given topic, from planning to publication. This example will also leverage the leading autonomous research agent GPT Researcher. + +### The Research Agent Team +The research team consists of seven LLM agents: + +* **Chief Editor** — Oversees the research process and manages the team. This is the “master” agent that coordinates the other agents using LangGraph. This agent acts as the main LangGraph interface. +* **GPT Researcher** — A specialized autonomous agent that conducts in-depth research on a given topic. +* **Editor** — Responsible for planning the research outline and structure. +* **Reviewer** — Validates the correctness of the research results given a set of criteria. +* **Reviser** — Revises the research results based on the feedback from the reviewer. +* **Writer** — Responsible for compiling and writing the final report. +* **Publisher** — Responsible for publishing the final report in various formats. + +### Architecture +As seen below, the automation process is based on the following stages: Planning the research, data collection and analysis, review and revision, writing the report and finally publication: + +![Architecture](./architecture.jpeg) + +More specifically, the process is as follows: + +* **Browser (gpt-researcher)** — Browses the internet for initial research based on the given research task. This step is crucial for LLMs to plan the research process based on up-to-date and relevant information, and not rely solely on pre-trained data for a given task or topic. +* **Editor** — Plans the report outline and structure based on the initial research. The Editor is also responsible for triggering the parallel research tasks based on the planned outline. +* For each outline topic (in parallel): + * **Researcher (gpt-researcher)** — Runs in-depth research on the subtopics and writes a draft. This agent leverages the GPT Researcher Python package under the hood for an optimized, in-depth, and factual research report. + * **Reviewer** — Validates the correctness of the draft given a set of guidelines and provides feedback to the reviser (if any). + * **Reviser** — Revises the draft until it is satisfactory based on the reviewer feedback. +* **Writer** — Compiles and writes the final report including an introduction, conclusion and references section from the given research findings. +* **Publisher** — Publishes the final report in multiple formats such as PDF, Docx, Markdown, etc. + +We will not dive into all the code since there’s a lot of it, but will focus mostly on the interesting parts I’ve found valuable to share. + +## Define the Graph State +One of my favorite features of LangGraph is state management. States in LangGraph are facilitated through a structured approach where developers define a GraphState that encapsulates the entire state of the application. Each node in the graph can modify this state, allowing for dynamic responses based on the evolving context of the interaction. + +Like at the start of every technical design, considering the data schema throughout the application is key.
In this case we’ll define a ResearchState like so: + +```python +class ResearchState(TypedDict): + task: dict + initial_research: str + sections: List[str] + research_data: List[dict] + # Report layout + title: str + headers: dict + date: str + table_of_contents: str + introduction: str + conclusion: str + sources: List[str] + report: str +``` + +As seen above, the state is divided into two main areas: the research task and the report layout content. As data circulates through the graph agents, each agent will, in turn, generate new data based on the existing state and update it for subsequent processing further down the graph with other agents. + +We can then initialize the graph with the following: + + +```python +from langgraph.graph import StateGraph +workflow = StateGraph(ResearchState) +``` + +Initializing the graph with LangGraph +As stated above, one of the great things about multi-agent development is building each agent to have specialized and scoped skills. Let’s take an example of the Researcher agent using GPT Researcher python package: + +```python +from gpt_researcher import GPTResearcher + +class ResearchAgent: + def __init__(self): + pass + + async def research(self, query: str): + # Initialize the researcher + researcher = GPTResearcher(parent_query=parent_query, query=query, report_type=research_report, config_path=None) + # Conduct research on the given query + await researcher.conduct_research() + # Write the report + report = await researcher.write_report() + + return report +``` + +As you can see above, we’ve created an instance of the Research agent. Now let’s assume we’ve done the same for each of the team’s agent. After creating all of the agents, we’d initialize the graph with LangGraph: + +```python +def init_research_team(self): + # Initialize skills + editor_agent = EditorAgent(self.task) + research_agent = ResearchAgent() + writer_agent = WriterAgent() + publisher_agent = PublisherAgent(self.output_dir) + + # Define a Langchain StateGraph with the ResearchState + workflow = StateGraph(ResearchState) + + # Add nodes for each agent + workflow.add_node("browser", research_agent.run_initial_research) + workflow.add_node("planner", editor_agent.plan_research) + workflow.add_node("researcher", editor_agent.run_parallel_research) + workflow.add_node("writer", writer_agent.run) + workflow.add_node("publisher", publisher_agent.run) + + workflow.add_edge('browser', 'planner') + workflow.add_edge('planner', 'researcher') + workflow.add_edge('researcher', 'writer') + workflow.add_edge('writer', 'publisher') + + # set up start and end nodes + workflow.set_entry_point("browser") + workflow.add_edge('publisher', END) + + return workflow +``` + +As seen above, creating the LangGraph graph is very straight forward and consists of three main functions: add_node, add_edge and set_entry_point. With these main functions you can first add the nodes to the graph, connect the edges and finally set the starting point. + +Focus check: If you’ve been following the code and architecture properly, you’ll notice that the Reviewer and Reviser agents are missing in the initialization above. Let’s dive into it! + +## A Graph within a Graph to support stateful Parallelization +This was the most exciting part of my experience working with LangGraph! One exciting feature of this autonomous assistant is having a parallel run for each research task, that would be reviewed and revised based on a set of predefined guidelines. 
+ +Knowing how to leverage parallel work within a process is key for optimizing speed. But how would you trigger parallel agent work if all agents report to the same state? This can cause race conditions and inconsistencies in the final data report. To solve this, you can create a sub graph, that would be triggered from the main LangGraph instance. This sub graph would hold its own state for each parallel run, and that would solve the issues that were raised. + +As we’ve done before, let’s define the LangGraph state and its agents. Since this sub graph basically reviews and revises a research draft, we’ll define the state with draft information: + +```python +class DraftState(TypedDict): + task: dict + topic: str + draft: dict + review: str + revision_notes: str +``` + +As seen in the DraftState, we mostly care about the topic discussed, and the reviewer and revision notes as they communicate between each other to finalize the subtopic research report. To create the circular condition we’ll take advantage of the last important piece of LangGraph which is conditional edges: + +```python +async def run_parallel_research(self, research_state: dict): + workflow = StateGraph(DraftState) + + workflow.add_node("researcher", research_agent.run_depth_research) + workflow.add_node("reviewer", reviewer_agent.run) + workflow.add_node("reviser", reviser_agent.run) + + # set up edges researcher->reviewer->reviser->reviewer... + workflow.set_entry_point("researcher") + workflow.add_edge('researcher', 'reviewer') + workflow.add_edge('reviser', 'reviewer') + workflow.add_conditional_edges('reviewer', + (lambda draft: "accept" if draft['review'] is None else "revise"), + {"accept": END, "revise": "reviser"}) +``` + +By defining the conditional edges, the graph would direct to reviser if there exists review notes by the reviewer, or the cycle would end with the final draft. If you go back to the main graph we’ve built, you’ll see that this parallel work is under a node named “researcher” called by ChiefEditor agent. + +Running the Research Assistant +After finalizing the agents, states and graphs, it’s time to run our research assistant! To make it easier to customize, the assistant runs with a given task.json file: + +```json +{ + "query": "Is AI in a hype cycle?", + "max_sections": 3, + "publish_formats": { + "markdown": true, + "pdf": true, + "docx": true + }, + "follow_guidelines": false, + "model": "gpt-4-turbo", + "guidelines": [ + "The report MUST be written in APA format", + "Each sub section MUST include supporting sources using hyperlinks. If none exist, erase the sub section or rewrite it to be a part of the previous section", + "The report MUST be written in spanish" + ] +} +``` + +The task object is pretty self explanatory, however please notice that follow_guidelines if false would cause the graph to ignore the revision step and defined guidelines. Also, the max_sections field defines how many subheaders to research for. Having less will generate a shorter report. + +Running the assistant will result in a final research report in formats such as Markdown, PDF and Docx. + +To download and run the example check out the GPT Researcher x LangGraph [open source page](https://github.com/assafelovic/gpt-researcher/tree/master/multi_agents). + +## What’s Next? +Going forward, there are super exciting things to think about. Human in the loop is key for optimized AI experiences. 
Having a human help the assistant revise and focus on just the right research plan, topics and outline, would enhance the overall quality and experience. Also generally, aiming for relying on human intervention throughout the AI flow ensures correctness, sense of control and deterministic results. Happy to see that LangGraph already supports this out of the box as seen here. + +In addition, having support for research about both web and local data would be key for many types of business and personal use cases. + +Lastly, more efforts can be done to improve the quality of retrieved sources and making sure the final report is built in the optimal storyline. + +A step forward in LangGraph and multi-agent collaboration in a whole would be where assistants can plan and generate graphs dynamically based on given tasks. This vision would allow assistants to choose only a subset of agents for a given task and plan their strategy based on the graph fundamentals as presented in this article and open a whole new world of possibilities. Given the pace of innovation in the AI space, it won’t be long before a new disruptive version of GPT Researcher is launched. Looking forward to what the future brings! + +To keep track of this project’s ongoing progress and updates please join our Discord community. And as always, if you have any feedback or further questions, please comment below! \ No newline at end of file diff --git a/docs/blog/2024-09-7-hybrid-research/gptr-hybrid.png b/docs/blog/2024-09-7-hybrid-research/gptr-hybrid.png new file mode 100644 index 0000000000000000000000000000000000000000..6f61df534085d6b1c284b90837e1839566928321 --- /dev/null +++ b/docs/blog/2024-09-7-hybrid-research/gptr-hybrid.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c5aa8651fbac012c82ac12d1009ef10a170ee1a356b2a7898858f920a2f6b17a +size 194577 diff --git a/docs/blog/2024-09-7-hybrid-research/index.md b/docs/blog/2024-09-7-hybrid-research/index.md new file mode 100644 index 0000000000000000000000000000000000000000..6374d50b1bec0ef59dd424ccdb246636dcabe984 --- /dev/null +++ b/docs/blog/2024-09-7-hybrid-research/index.md @@ -0,0 +1,182 @@ +--- +slug: gptr-hybrid +title: The Future of Research is Hybrid +authors: [assafe] +tags: [hybrid-research, gpt-researcher, langchain, langgraph, tavily] +image: https://miro.medium.com/v2/resize:fit:1400/1*NgVIlZVSePqrK5EkB1wu4Q.png +--- +![Hyrbrid Research with GPT Researcher](https://miro.medium.com/v2/resize:fit:1400/1*MaauY1ecsD05nL8JqW0Zdg.jpeg) + +Over the past few years, we've seen an explosion of new AI tools designed to disrupt research. Some, like [ChatPDF](https://www.chatpdf.com/) and [Consensus](https://consensus.app), focus on extracting insights from documents. Others, such as [Perplexity](https://www.perplexity.ai/), excel at scouring the web for information. But here's the thing: none of these tools combine both web and local document search within a single contextual research pipeline. + +This is why I'm excited to introduce the latest advancements of **[GPT Researcher](https://gptr.dev)** — now able to conduct hybrid research on any given task and documents. + +Web driven research often lacks specific context, risks information overload, and may include outdated or unreliable data. On the flip side, local driven research is limited to historical data and existing knowledge, potentially creating organizational echo chambers and missing out on crucial market trends or competitor moves. 
Both approaches, when used in isolation, can lead to incomplete or biased insights, hampering your ability to make fully informed decisions. + +Today, we're going to change the game. By the end of this guide, you'll learn how to conduct hybrid research that combines the best of both worlds — web and local — enabling you to conduct more thorough, relevant, and insightful research. + +## Why Hybrid Research Works Better + +By combining web and local sources, hybrid research addresses these limitations and offers several key advantages: + +1. **Grounded context**: Local documents provide a foundation of verified, organization specific information. This grounds the research in established knowledge, reducing the risk of straying from core concepts or misinterpreting industry specific terminology. + + *Example*: A pharmaceutical company researching a new drug development opportunity can use its internal research papers and clinical trial data as a base, then supplement this with the latest published studies and regulatory updates from the web. + +2. **Enhanced accuracy**: Web sources offer up-to-date information, while local documents provide historical context. This combination allows for more accurate trend analysis and decision-making. + + *Example*: A financial services firm analyzing market trends can combine their historical trading data with real-time market news and social media sentiment analysis to make more informed investment decisions. + +3. **Reduced bias**: By drawing from both web and local sources, we mitigate the risk of bias that might be present in either source alone. + + *Example*: A tech company evaluating its product roadmap can balance internal feature requests and usage data with external customer reviews and competitor analysis, ensuring a well-rounded perspective. + +4. **Improved planning and reasoning**: LLMs can leverage the context from local documents to better plan their web research strategies and reason about the information they find online. + + *Example*: An AI-powered market research tool can use a company's past campaign data to guide its web search for current marketing trends, resulting in more relevant and actionable insights. + +5. **Customized insights**: Hybrid research allows for the integration of proprietary information with public data, leading to unique, organization-specific insights. + + *Example*: A retail chain can combine its sales data with web-scraped competitor pricing and economic indicators to optimize its pricing strategy in different regions. + +These are just a few examples for business use cases that can leverage hybrid research, but enough with the small talk — let's build! + +## Building the Hybrid Research Assistant + +Before we dive into the details, it's worth noting that GPT Researcher has the capability to conduct hybrid research out of the box! However, to truly appreciate how this works and to give you a deeper understanding of the process, we're going to take a look under the hood. + +![GPT Researcher hybrid research](./gptr-hybrid.png) + +GPT Researcher conducts web research based on an auto-generated plan from local documents, as seen in the architecture above. It then retrieves relevant information from both local and web data for the final research report. + +We'll explore how local documents are processed using LangChain, which is a key component of GPT Researcher's document handling. 
Then, we'll show you how to leverage GPT Researcher to conduct hybrid research, combining the advantages of web search with your local document knowledge base. + +### Processing Local Documents with Langchain + +LangChain provides a variety of document loaders that allow us to process different file types. This flexibility is crucial when dealing with diverse local documents. Here's how to set it up: + +```python +from langchain_community.document_loaders import ( + PyMuPDFLoader, + TextLoader, + UnstructuredCSVLoader, + UnstructuredExcelLoader, + UnstructuredMarkdownLoader, + UnstructuredPowerPointLoader, + UnstructuredWordDocumentLoader +) +from langchain.text_splitter import RecursiveCharacterTextSplitter +from langchain.embeddings import OpenAIEmbeddings +from langchain.vectorstores import Chroma + +def load_local_documents(file_paths): + documents = [] + for file_path in file_paths: + if file_path.endswith('.pdf'): + loader = PyMuPDFLoader(file_path) + elif file_path.endswith('.txt'): + loader = TextLoader(file_path) + elif file_path.endswith('.csv'): + loader = UnstructuredCSVLoader(file_path) + elif file_path.endswith('.xlsx'): + loader = UnstructuredExcelLoader(file_path) + elif file_path.endswith('.md'): + loader = UnstructuredMarkdownLoader(file_path) + elif file_path.endswith('.pptx'): + loader = UnstructuredPowerPointLoader(file_path) + elif file_path.endswith('.docx'): + loader = UnstructuredWordDocumentLoader(file_path) + else: + raise ValueError(f"Unsupported file type: {file_path}") + + documents.extend(loader.load()) + + return documents + +# Use the function to load your local documents +local_docs = load_local_documents(['company_report.pdf', 'meeting_notes.docx', 'data.csv']) + +# Split the documents into smaller chunks for more efficient processing +text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200) +splits = text_splitter.split_documents(local_docs) + +# Create embeddings and store them in a vector database for quick retrieval +embeddings = OpenAIEmbeddings() +vectorstore = Chroma.from_documents(documents=splits, embedding=embeddings) + +# Example of how to perform a similarity search +query = "What were the key points from our last strategy meeting?" +relevant_docs = vectorstore.similarity_search(query, k=3) + +for doc in relevant_docs: + print(doc.page_content) +``` + +### Conducting Web Research with GPT Researcher + +Now that we've learned how to work with local documents, let's take a quick look at how GPT Researcher works under the hood: + +![GPT Researcher Architecture](https://miro.medium.com/v2/resize:fit:1400/1*yFtT43N0GxL0TMKvjtYjug.png) + +As seen above, GPT Researcher creates a research plan based on the given task by generating potential research queries that can collectively provide an objective and broad overview of the topic. Once these queries are generated, GPT Researcher uses a search engine like Tavily to find relevant results. Each scraped result is then saved in a vector database. Finally, the top k chunks most related to the research task are retrieved to generate a final research report. + +GPT Researcher supports hybrid research, which involves an additional step of chunking local documents (implemented using Langchain) before retrieving the most related information. After numerous evaluations conducted by the community, we've found that hybrid research improved the correctness of final results by over 40%! 
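Before moving on, here is a rough sketch of how the two retrieval paths could be merged manually. It reuses the `vectorstore` built in the LangChain snippet above together with Tavily's `get_search_context` helper; GPT Researcher handles this plumbing internally, so treat it as an illustration of the idea rather than its exact implementation.

```python
import os
from tavily import TavilyClient

tavily_client = TavilyClient(api_key=os.environ["TAVILY_API_KEY"])

def gather_hybrid_context(query: str, vectorstore, k: int = 5) -> str:
    # Web side: fresh, up-to-date context from the search API
    web_context = tavily_client.get_search_context(
        query, search_depth="advanced", max_tokens=4000
    )

    # Local side: the top-k most similar chunks from the document vector store
    local_docs = vectorstore.similarity_search(query, k=k)
    local_context = "\n\n".join(doc.page_content for doc in local_docs)

    # Merge both sources into a single context for the report-writing LLM
    return f"Local documents:\n{local_context}\n\nWeb results:\n{web_context}"

# Example usage with the vectorstore created in the previous snippet
hybrid_context = gather_hybrid_context(
    "How does our product roadmap compare to emerging market trends?", vectorstore
)
```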
+ +### Running the Hybrid Research with GPT Researcher + +Now that you have a better understanding of how hybrid research works, let's demonstrate how easy this can be achieved with GPT Researcher. + +#### Step 1: Install GPT Researcher with PIP + +```bash +pip install gpt-researcher +``` + +#### Step 2: Setting up the environment + +We will run GPT Researcher with OpenAI as the LLM vendor and Tavily as the search engine. You'll need to obtain API keys for both before moving forward. Then, export the environment variables in your CLI as follows: + +```bash +export OPENAI_API_KEY={your-openai-key} +export TAVILY_API_KEY={your-tavily-key} +``` + +#### Step 3: Initialize GPT Researcher with hybrid research configuration + +GPT Researcher can be easily initialized with params that signal it to run a hybrid research. You can conduct many forms of research, head to the documentation page to learn more. + +To get GPT Researcher to run a hybrid research, you need to include all relevant files in my-docs directory (create it if it doesn't exist), and set the instance report_source to "hybrid" as seen below. Once the report source is set to hybrid, GPT Researcher will look for existing documents in the my-docs directory and include them in the research. If no documents exist, it will ignore it. + +```python +from gpt_researcher import GPTResearcher +import asyncio + +async def get_research_report(query: str, report_type: str, report_source: str) -> str: + researcher = GPTResearcher(query=query, report_type=report_type, report_source=report_source) + research = await researcher.conduct_research() + report = await researcher.write_report() + return report + +if __name__ == "__main__": + query = "How does our product roadmap compare to emerging market trends in our industry?" + report_source = "hybrid" + + report = asyncio.run(get_research_report(query=query, report_type="research_report", report_source=report_source)) + print(report) +``` + +As seen above, we can run the research on the following example: + +- Research task: "How does our product roadmap compare to emerging market trends in our industry?" +- Web: Current market trends, competitor announcements, and industry forecasts +- Local: Internal product roadmap documents and feature prioritization lists + +After various community evaluations we've found that the results of this research improve quality and correctness of research by over 40% and remove hallucinations by 50%. Moreover as stated above, local information helps the LLM improve planning reasoning allowing it to make better decisions and researching more relevant web sources. + +But wait, there's more! GPT Researcher also includes a sleek front-end app using NextJS and Tailwind. To learn how to get it running check out the documentation page. You can easily use drag and drop for documents to run hybrid research. + +## Conclusion + +Hybrid research represents a significant advancement in data gathering and decision making. By leveraging tools like [GPT Researcher](https://gptr.dev), teams can now conduct more comprehensive, context-aware, and actionable research. This approach addresses the limitations of using web or local sources in isolation, offering benefits such as grounded context, enhanced accuracy, reduced bias, improved planning and reasoning, and customized insights. 
+ +The automation of hybrid research can enable teams to make faster, more data-driven decisions, ultimately enhancing productivity and offering a competitive advantage in analyzing an expanding pool of unstructured and dynamic information. \ No newline at end of file diff --git a/docs/blog/authors.yml b/docs/blog/authors.yml new file mode 100644 index 0000000000000000000000000000000000000000..846ba2f00962f641fd09c6e389c8a7ee684888f7 --- /dev/null +++ b/docs/blog/authors.yml @@ -0,0 +1,5 @@ +assafe: + name: Assaf Elovic + title: Creator @ GPT Researcher and Tavily + url: https://github.com/assafelovic + image_url: https://lh3.googleusercontent.com/a/ACg8ocJtrLku69VG_2Y0sJa5mt66gIGNaEBX5r_mgE6CRPEb7A=s96-c diff --git a/docs/docs/contribute.md b/docs/docs/contribute.md new file mode 100644 index 0000000000000000000000000000000000000000..fa9c9f14049ce646b76ad750a0c3e3196ff1b5f0 --- /dev/null +++ b/docs/docs/contribute.md @@ -0,0 +1,5 @@ +# Contribute + +We highly welcome contributions! Please check out [contributing](https://github.com/assafelovic/gpt-researcher/blob/master/CONTRIBUTING.md) if you're interested. + +Please check out our [roadmap](https://trello.com/b/3O7KBePw/gpt-researcher-roadmap) page and reach out to us via our [Discord community](https://discord.gg/QgZXvJAccX) if you're interested in joining our mission. \ No newline at end of file diff --git a/docs/docs/examples/detailed_report.md b/docs/docs/examples/detailed_report.md new file mode 100644 index 0000000000000000000000000000000000000000..e4e86eced0337c5c00f86c9112a1f236dc3d5d73 --- /dev/null +++ b/docs/docs/examples/detailed_report.md @@ -0,0 +1,82 @@ +# Detailed Report + +## Overview + +The `DetailedReport` class inspired by the recent STORM paper, is a powerful component of GPT Researcher, designed to generate comprehensive reports on complex topics. It's particularly useful for creating long-form content that exceeds the typical limits of LLM outputs. This class orchestrates the research process, breaking down the main query into subtopics, conducting in-depth research on each, and combining the results into a cohesive, detailed report. + +Located in `backend/report_types/detailed_report.py` in the [GPT Researcher GitHub repository](https://github.com/assafelovic/gpt-researcher), this class leverages the capabilities of the `GPTResearcher` agent to perform targeted research and generate content. + +## Key Features + +- Breaks down complex topics into manageable subtopics +- Conducts in-depth research on each subtopic +- Generates a comprehensive report with introduction, table of contents, and body +- Avoids redundancy by tracking previously written content +- Supports asynchronous operations for improved performance + +## Class Structure + +### Initialization + +The `DetailedReport` class is initialized with the following parameters: + +- `query`: The main research query +- `report_type`: Type of the report +- `report_source`: Source of the report +- `source_urls`: Initial list of source URLs +- `config_path`: Path to the configuration file +- `tone`: Tone of the report (using the `Tone` enum) +- `websocket`: WebSocket for real-time communication +- `subtopics`: Optional list of predefined subtopics +- `headers`: Optional headers for HTTP requests + +## How It Works + +1. The `DetailedReport` class starts by conducting initial research on the main query. +2. It then breaks down the topic into subtopics. +3. 
For each subtopic, it: + - Conducts focused research + - Generates draft section titles + - Retrieves relevant previously written content to avoid redundancy + - Writes a report section +4. Finally, it combines all subtopic reports, adds a table of contents, and includes source references to create the final detailed report. + +## Usage Example + +Here's how you can use the `DetailedReport` class in your project: + +```python +import asyncio +from fastapi import WebSocket +from gpt_researcher.utils.enum import Tone +from backend.report_type import DetailedReport + +async def generate_report(websocket: WebSocket): + detailed_report = DetailedReport( + query="The impact of artificial intelligence on modern healthcare", + report_type="research_report", + report_source="web_search", + source_urls=[], # You can provide initial source URLs if available + config_path="path/to/config.yaml", + tone=Tone.FORMAL, + websocket=websocket, + subtopics=[], # You can provide predefined subtopics if desired + headers={} # Add any necessary HTTP headers + ) + + final_report = await detailed_report.run() + return final_report + +# In your FastAPI app +@app.websocket("/generate_report") +async def websocket_endpoint(websocket: WebSocket): + await websocket.accept() + report = await generate_report(websocket) + await websocket.send_text(report) +``` + +This example demonstrates how to create a `DetailedReport` instance and run it to generate a comprehensive report on the impact of AI on healthcare. + +## Conclusion + +The `DetailedReport` class is a sophisticated tool for generating in-depth, well-structured reports on complex topics. By breaking down the main query into subtopics and leveraging the power of GPT Researcher, it can produce content that goes beyond the typical limitations of LLM outputs. This makes it an invaluable asset for researchers, content creators, and anyone needing detailed, well-researched information on a given topic. 
\ No newline at end of file diff --git a/docs/docs/examples/examples.ipynb b/docs/docs/examples/examples.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..fdcd33a48e2fba6b305ea42e8a7b6007b69bf907 --- /dev/null +++ b/docs/docs/examples/examples.ipynb @@ -0,0 +1,261 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "6ab73899", + "metadata": {}, + "source": [ + "# Tavily Samples" + ] + }, + { + "cell_type": "markdown", + "id": "013eda36", + "metadata": {}, + "source": [ + "## Setup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8ad25551", + "metadata": { + "ExecuteTime": { + "end_time": "2023-11-08T15:57:13.339729Z", + "start_time": "2023-11-08T15:57:11.156595Z" + } + }, + "outputs": [], + "source": [ + "# install tavily\n", + "!pip install tavily-python" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "c0722950", + "metadata": { + "ExecuteTime": { + "end_time": "2023-11-08T16:01:01.318977Z", + "start_time": "2023-11-08T16:01:01.314688Z" + } + }, + "outputs": [], + "source": [ + "# import and connect\n", + "from tavily import TavilyClient\n", + "client = TavilyClient(api_key=\"\")" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "9328a188", + "metadata": { + "ExecuteTime": { + "end_time": "2023-11-08T16:02:25.587726Z", + "start_time": "2023-11-08T16:02:18.663961Z" + }, + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "{'query': 'What happend in the latest burning man floods?',\n", + " 'follow_up_questions': ['How severe were the floods at Burning Man?',\n", + " 'What were the impacts of the floods?',\n", + " 'How did the organizers handle the floods at Burning Man?'],\n", + " 'answer': None,\n", + " 'images': None,\n", + " 'results': [{'content': \"This year’s rains opened the floodgates for Burning Man criticism Give Newsletters Site search Vox main menu Filed under: The Burning Man flameout, explained Climate change — and schadenfreude\\xa0— finally caught up to the survivalist cosplayers. Share this story Share Has Burning Man finally lost its glamour? September 1, after most of the scheduled events and live performances were canceled due to the weather, Burning Man organizers closed routes in and out of the area, forcing attendees to stay behindShare Attendees look at a rainbow over flooding on a desert plain on September 1, 2023, after heavy rains turned the annual Burning Man festival site in Nevada's Black Rock desert into a mud...\",\n", + " 'url': 'https://www.vox.com/culture/2023/9/6/23861675/burning-man-2023-mud-stranded-climate-change-playa-foot',\n", + " 'score': 0.9797,\n", + " 'raw_content': None},\n", + " {'content': 'Tens of thousands of Burning Man festivalgoers are slowly making their way home from the Nevada desert after muddy conditions from heavy rains made it nearly impossible to leave over the weekend. according to burningman.org. Though the death at this year\\'s Burning Man is still being investigated, a social media hoax was blamed for spreading rumors that it\\'s due to a breakout of Ebola. 
\"Thank goodness this community knows how to take care of each other,\" the Instagram page for Burning Man Information Radio wrote on a post predicting more rain.News Burning Man attendees make mass exodus after being stranded in the mud at festival A caravan of festivalgoers were backed up as much as eight hours when they were finally allowed to leave...',\n", + " 'url': 'https://www.today.com/news/what-is-burning-man-flood-death-rcna103231',\n", + " 'score': 0.9691,\n", + " 'raw_content': None},\n", + " {'content': '“It was a perfect, typical Burning Man weather until Friday — then the rain started coming down hard,\" said Phillip Martin, 37. \"Then it turned into Mud Fest.\" After more than a half-inch (1.3 centimeters) of rain fell Friday, flooding turned the playa to foot-deep mud — closing roads and forcing burners to lean on each other for help. ABC News Video Live Shows Election 2024 538 Stream on No longer stranded, tens of thousands clean up and head home after Burning Man floods Mark Fromson, 54, who goes by the name “Stuffy” on the playa, had been staying in an RV, but the rains forced him to find shelter at another camp, where fellow burners provided him food and cover.RENO, Nev. -- The traffic jam leaving the Burning Man festival eased up considerably Tuesday as the exodus from the mud-caked Nevada desert entered another day following massive rain that left tens of thousands of partygoers stranded for days.',\n", + " 'url': 'https://abcnews.go.com/US/wireStory/wait-times-exit-burning-man-drop-after-flooding-102936473',\n", + " 'score': 0.9648,\n", + " 'raw_content': None},\n", + " {'content': 'Burning Man hit by heavy rains, now mud soaked.People there told to conserve food and water as they shelter in place.(Video: Josh Keppel) pic.twitter.com/DuBj0Ejtb8 More on this story Burning Man revelers begin exodus from festival after road reopens Officials investigate death at Burning Man as thousands stranded by floods Burning Man festival-goers trapped in desert as rain turns site to mud Tens of thousands of ‘burners’ urged to conserve food and water as rain and flash floods sweep Nevada Burning Man festivalgoers surrounded by mud in Nevada desert – video Burning Man attendees roadblocked by climate activists: ‘They have a privileged mindset’Last year, Burning Man drew approximately 80,000 people. This year, only about 60,000 were expected - with many citing the usual heat and dust and eight-hour traffic jams when they tried to leave.',\n", + " 'url': 'https://www.theguardian.com/culture/2023/sep/02/burning-man-festival-mud-trapped-shelter-in-place',\n", + " 'score': 0.9618,\n", + " 'raw_content': None},\n", + " {'content': 'Skip links Live Navigation menu Live Death at Burning Man investigated in US, thousands stranded by flooding Attendees trudged through mud, many barefoot or wearing plastic bags on their feet. The revellers were urged to shelter in place and conserve food, water and other supplies. Thousands of festivalgoers remain stranded as organisers close vehicular traffic to the festival site following storm flooding in Nevada’s desert. Authorities in Nevada are investigating a death at the site of the Burning Man festival, where thousands of attendees remained stranded after flooding from storms swept through the Nevada desert in3 Sep 2023. 
Authorities in Nevada are investigating a death at the site of the Burning Man festival, where thousands of attendees remained stranded after flooding from storms swept through the ...',\n", + " 'url': 'https://www.aljazeera.com/news/2023/9/3/death-under-investigation-after-storm-flooding-at-burning-man-festival',\n", + " 'score': 0.9612,\n", + " 'raw_content': None}],\n", + " 'response_time': 6.23}" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# simple query using tavily's advanced search\n", + "client.search(\"What happend in the latest burning man floods?\", search_depth=\"advanced\")" + ] + }, + { + "cell_type": "markdown", + "id": "e98ea835", + "metadata": {}, + "source": [ + "## Sample 1: Reseach Report using Tavily and GPT-4 with Langchain" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b7b05128", + "metadata": {}, + "outputs": [], + "source": [ + "# install lanchain\n", + "!pip install langchain" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "b2246f61", + "metadata": { + "ExecuteTime": { + "end_time": "2023-11-08T16:57:59.797466Z", + "start_time": "2023-11-08T16:57:59.793194Z" + } + }, + "outputs": [], + "source": [ + "# set up openai api key\n", + "openai_api_key = \"\"" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "c574f1b8", + "metadata": { + "ExecuteTime": { + "end_time": "2023-11-08T16:59:03.572367Z", + "start_time": "2023-11-08T16:58:01.823114Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "# The Burning Man Festival 2023: A Festival Turned Mud Fest\n", + "\n", + "**Abstract:** The Burning Man Festival of 2023 in Nevada’s Black Rock desert will be remembered for a significant event: a heavy rainfall that turned the festival site into a muddy mess, testing the community spirit of the annual event attendees and stranding tens of thousands of festival-goers. \n", + "\n", + "**Keywords:** Burning Man Festival, flooding, rainfall, mud, community spirit, Nevada, Black Rock desert, stranded attendees, shelter\n", + "\n", + "---\n", + "## 1. Introduction\n", + "\n", + "The Burning Man Festival, an annual event known for its art installations, free spirit, and community ethos, faced an unprecedented challenge in 2023 due to heavy rains that flooded the festival site, turning it into a foot-deep mud pit[^1^][^2^]. The festival, held in Nevada's Black Rock desert, is known for its harsh weather conditions, including heat and dust, but this was the first time the event was affected to such an extent by rainfall[^4^].\n", + "\n", + "## 2. Impact of the Rain\n", + "\n", + "The heavy rains started on Friday, and more than a half-inch of rain fell, leading to flooding that turned the playa into a foot-deep mud pit[^2^]. The roads were closed due to the muddy conditions, stranding tens of thousands of festival-goers[^2^][^5^]. The burners, as the attendees are known, were forced to lean on each other for help[^2^].\n", + "\n", + "## 3. Community Spirit Tested\n", + "\n", + "The unexpected weather conditions put the Burning Man community spirit to the test[^1^]. Festival-goers found themselves sheltering in place, conserving food and water, and helping each other out[^3^]. For instance, Mark Fromson, who had been staying in an RV, was forced to find shelter at another camp due to the rains, where fellow burners provided him with food and cover[^2^].\n", + "\n", + "## 4. 
Exodus After Rain\n", + "\n", + "Despite the challenges, the festival-goers made the best of the situation. Once the rain stopped and things dried up a bit, the party quickly resumed[^3^]. A day later than scheduled, the massive wooden effigy known as the Man was set ablaze[^5^]. As the situation improved, thousands of Burning Man attendees began their mass exodus from the festival site[^5^].\n", + "\n", + "## 5. Conclusion\n", + "\n", + "The Burning Man Festival of 2023 will be remembered for the community spirit shown by the attendees in the face of heavy rainfall and flooding. Although the event was marred by the weather, the festival-goers managed to make the best of the situation, demonstrating the resilience and camaraderie that the Burning Man Festival is known for.\n", + "\n", + "---\n", + "**References**\n", + "\n", + "[^1^]: \"Attendees walk through a muddy desert plain...\" NPR. 2023. https://www.npr.org/2023/09/02/1197441202/burning-man-festival-rains-floods-stranded-nevada.\n", + "\n", + "[^2^]: “'It was a perfect, typical Burning Man weather until Friday...'\" ABC News. 2023. https://abcnews.go.com/US/wireStory/wait-times-exit-burning-man-drop-after-flooding-102936473.\n", + "\n", + "[^3^]: \"The latest on the Burning Man flooding...\" WUNC. 2023. https://www.wunc.org/2023-09-03/the-latest-on-the-burning-man-flooding.\n", + "\n", + "[^4^]: \"Burning Man hit by heavy rains, now mud soaked...\" The Guardian. 2023. https://www.theguardian.com/culture/2023/sep/02/burning-man-festival-mud-trapped-shelter-in-place.\n", + "\n", + "[^5^]: \"One day later than scheduled, the massive wooden effigy known as the Man was set ablaze...\" CNN. 2023. https://www.cnn.com/2023/09/05/us/burning-man-storms-shelter-exodus-tuesday/index.html.\n" + ] + } + ], + "source": [ + "# libraries\n", + "from langchain.adapters.openai import convert_openai_messages\n", + "from langchain_community.chat_models import ChatOpenAI\n", + "\n", + "# setup query\n", + "query = \"What happend in the latest burning man floods?\"\n", + "\n", + "# run tavily search\n", + "content = client.search(query, search_depth=\"advanced\")[\"results\"]\n", + "\n", + "# setup prompt\n", + "prompt = [{\n", + " \"role\": \"system\",\n", + " \"content\": f'You are an AI critical thinker research assistant. 
'\\\n", + " f'Your sole purpose is to write well written, critically acclaimed,'\\\n", + " f'objective and structured reports on given text.'\n", + "}, {\n", + " \"role\": \"user\",\n", + " \"content\": f'Information: \"\"\"{content}\"\"\"\\n\\n' \\\n", + " f'Using the above information, answer the following'\\\n", + " f'query: \"{query}\" in a detailed report --'\\\n", + " f'Please use MLA format and markdown syntax.'\n", + "}]\n", + "\n", + "# run gpt-4\n", + "lc_messages = convert_openai_messages(prompt)\n", + "report = ChatOpenAI(model='gpt-4',openai_api_key=openai_api_key).invoke(lc_messages).content\n", + "\n", + "# print report\n", + "print(report)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c679fbfe", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/examples/examples.md b/docs/docs/examples/examples.md new file mode 100644 index 0000000000000000000000000000000000000000..dfca2e92ce930c0575c933bb37024ea02f49ccfb --- /dev/null +++ b/docs/docs/examples/examples.md @@ -0,0 +1,31 @@ +# Simple Run + +### Run PIP Package +```python +from gpt_researcher import GPTResearcher +import asyncio + + +async def main(): + """ + This is a sample script that shows how to run a research report. + """ + # Query + query = "What happened in the latest burning man floods?" + + # Report Type + report_type = "research_report" + + # Initialize the researcher + researcher = GPTResearcher(query=query, report_type=report_type, config_path=None) + # Conduct research on the given query + await researcher.conduct_research() + # Write the report + report = await researcher.write_report() + + return report + + +if __name__ == "__main__": + asyncio.run(main()) +``` \ No newline at end of file diff --git a/docs/docs/examples/hybrid_research.md b/docs/docs/examples/hybrid_research.md new file mode 100644 index 0000000000000000000000000000000000000000..82519762e0fbdcd1a860f8adc3e735df5af49acd --- /dev/null +++ b/docs/docs/examples/hybrid_research.md @@ -0,0 +1,125 @@ +# Hybrid Research + +## Introduction + +GPT Researcher can combine web search capabilities with local document analysis to provide comprehensive, context-aware research results. + +This guide will walk you through the process of setting up and running hybrid research using GPT Researcher. 
+ +## Prerequisites + +Before you begin, ensure you have the following: + +- Python 3.10 or higher installed on your system +- pip (Python package installer) +- An OpenAI API key (you can also choose other supported [LLMs](../gpt-researcher/llms/llms.md)) +- A Tavily API key (you can also choose other supported [Retrievers](../gpt-researcher/search-engines/retrievers.md)) + +## Installation + +```bash +pip install gpt-researcher +``` + +## Setting Up the Environment + +Export your API keys as environment variables: + +```bash +export OPENAI_API_KEY=your_openai_api_key_here +export TAVILY_API_KEY=your_tavily_api_key_here +``` + +Alternatively, you can set these in your Python script: + +```python +import os +os.environ['OPENAI_API_KEY'] = 'your_openai_api_key_here' +os.environ['TAVILY_API_KEY'] = 'your_tavily_api_key_here' +``` + +Also, set the environment variable `REPORT_SOURCE` to an empty string `""` in `default.py`. + +## Preparing Documents + +### 1. Local Documents +1. Create a directory named `my-docs` in your project folder. +2. Place all relevant local documents (PDFs, TXTs, DOCXs, etc.) in this directory. + +### 2. Online Documents +1. Collect the URLs of your online documents, for example: https://xxxx.xxx.pdf (supported file formats include PDFs, TXTs, DOCXs, etc.) + + +## Running Hybrid Research with Local Documents + +Here's a basic script to run hybrid research: + +```python +from gpt_researcher import GPTResearcher +import asyncio + +async def get_research_report(query: str, report_type: str, report_source: str) -> str: + researcher = GPTResearcher(query=query, report_type=report_type, report_source=report_source) + research = await researcher.conduct_research() + report = await researcher.write_report() + return report + +if __name__ == "__main__": + query = "How does our product roadmap compare to emerging market trends in our industry?" + report_source = "hybrid" + + report = asyncio.run(get_research_report(query=query, report_type="research_report", report_source=report_source)) + print(report) +``` + +## Running Hybrid Research with Online Documents + +Here's a basic script to run hybrid research: + +```python +from gpt_researcher import GPTResearcher +import asyncio + +async def get_research_report(query: str, report_type: str, report_source: str, document_urls: list) -> str: + researcher = GPTResearcher(query=query, report_type=report_type, document_urls=document_urls, report_source=report_source) + research = await researcher.conduct_research() + report = await researcher.write_report() + return report + +if __name__ == "__main__": + query = "How does our product roadmap compare to emerging market trends in our industry?" + report_source = "hybrid" + document_urls = ["https://xxxx.xxx.pdf", "https://xxxx.xxx.doc"] + + report = asyncio.run(get_research_report(query=query, report_type="research_report", document_urls=document_urls, report_source=report_source)) + print(report) +``` + +To run the script: + +1. Save it as `run_research.py` +2. Execute it with: `python run_research.py` + +## Understanding the Results + +The output will be a comprehensive research report that combines insights from both web sources and your local documents. The report typically includes an executive summary, key findings, detailed analysis, comparisons between your internal data and external trends, and recommendations based on the combined insights. + +## Troubleshooting + +1. **API Key Issues**: Ensure your API keys are correctly set and have the necessary permissions. +2.
**Document Loading Errors**: Check that your local documents are in supported formats and are not corrupted. +3. **Memory Issues**: For large documents or extensive research, you may need to increase your system's available memory or adjust the `chunk_size` in the document processing step. + +## FAQ + +**Q: How long does a typical research session take?** +A: The duration varies based on the complexity of the query and the amount of data to process. It can range from 1-5 minutes for very comprehensive research. + +**Q: Can I use GPT Researcher with other language models?** +A: Currently, GPT Researcher is optimized for OpenAI's models. Support for other models can be found [here](../gpt-researcher/llms/llms.md). + +**Q: How does GPT Researcher handle conflicting information between local and web sources?** +A: The system attempts to reconcile differences by providing context and noting discrepancies in the final report. It prioritizes more recent or authoritative sources when conflicts arise. + +**Q: Is my local data sent to external servers during the research process?** +A: No, your local documents are processed on your machine. Only the generated queries and synthesized information (not raw data) are sent to external services for web research. + +For more information and updates, please visit the [GPT Researcher GitHub repository](https://github.com/assafelovic/gpt-researcher). diff --git a/docs/docs/examples/pip-run.ipynb b/docs/docs/examples/pip-run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..b4f6f9cadac38bb0e9db316679613b75412f260a --- /dev/null +++ b/docs/docs/examples/pip-run.ipynb @@ -0,0 +1,85 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "byPgKYhAE6gn" + }, + "outputs": [], + "source": [ + "import os\n", + "os.environ['OPENAI_API_KEY'] = 'your_openai_api_key'\n", + "os.environ['TAVILY_API_KEY'] = 'your_tavily_api_key' # Get a free key here: https://app.tavily.com" + ] + }, + { + "cell_type": "code", + "source": [ + "!pip install -U gpt-researcher nest_asyncio" + ], + "metadata": { + "id": "-rXET3OZLxwH" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "import nest_asyncio # required for notebooks\n", + "nest_asyncio.apply()\n", + "\n", + "from gpt_researcher import GPTResearcher\n", + "import asyncio\n", + "\n", + "async def get_report(query: str, report_type: str) -> str:\n", + " researcher = GPTResearcher(query, report_type)\n", + " research_result = await researcher.conduct_research()\n", + " report = await researcher.write_report()\n", + " \n", + " # Get additional information\n", + " research_context = researcher.get_research_context()\n", + " research_costs = researcher.get_costs()\n", + " research_images = researcher.get_research_images()\n", + " research_sources = researcher.get_research_sources()\n", + " \n", + " return report, research_context, research_costs, research_images, research_sources\n", + "\n", + "if __name__ == \"__main__\":\n", + " query = \"Should I invest in Nvidia?\"\n", + " report_type = \"research_report\"\n", + "\n", + " report, context, costs, images, sources = asyncio.run(get_report(query, report_type))\n", + " \n", + " print(\"Report:\")\n", + " print(report)\n", + " print(\"\\nResearch Costs:\")\n", + " 
print(costs)\n", + " print(\"\\nResearch Images:\")\n", + " print(images)\n", + " print(\"\\nResearch Sources:\")\n", + " print(sources)" + ], + "metadata": { + "id": "KWZe2InrL0ji" + }, + "execution_count": null, + "outputs": [] + } + ] +} \ No newline at end of file diff --git a/docs/docs/examples/sample_report.py b/docs/docs/examples/sample_report.py new file mode 100644 index 0000000000000000000000000000000000000000..0bd23f8c58e6be9a8703fa431b5b5be796d79ca4 --- /dev/null +++ b/docs/docs/examples/sample_report.py @@ -0,0 +1,36 @@ +import nest_asyncio # required for notebooks + +nest_asyncio.apply() + +from gpt_researcher import GPTResearcher +import asyncio + + +async def get_report(query: str, report_type: str): + researcher = GPTResearcher(query, report_type) + research_result = await researcher.conduct_research() + report = await researcher.write_report() + + # Get additional information + research_context = researcher.get_research_context() + research_costs = researcher.get_costs() + research_images = researcher.get_research_images() + research_sources = researcher.get_research_sources() + + return report, research_context, research_costs, research_images, research_sources + + +if __name__ == "__main__": + query = "Should I invest in Nvidia?" + report_type = "research_report" + + report, context, costs, images, sources = asyncio.run(get_report(query, report_type)) + + print("Report:") + print(report) + print("\nResearch Costs:") + print(costs) + print("\nResearch Images:") + print(images) + print("\nResearch Sources:") + print(sources) \ No newline at end of file diff --git a/docs/docs/examples/sample_sources_only.py b/docs/docs/examples/sample_sources_only.py new file mode 100644 index 0000000000000000000000000000000000000000..bfef394254f2b09ae1b45cc6e4a9e6a803840182 --- /dev/null +++ b/docs/docs/examples/sample_sources_only.py @@ -0,0 +1,20 @@ +from gpt_researcher import GPTResearcher +import asyncio + + +async def get_report(query: str, report_source: str, sources: list) -> str: + researcher = GPTResearcher(query=query, report_source=report_source, source_urls=sources) + research_context = await researcher.conduct_research() + return await researcher.write_report() + +if __name__ == "__main__": + query = "What are the biggest trends in AI lately?" + report_source = "static" + sources = [ + "https://en.wikipedia.org/wiki/Artificial_intelligence", + "https://www.ibm.com/think/insights/artificial-intelligence-trends", + "https://www.forbes.com/advisor/business/ai-statistics" + ] + + report = asyncio.run(get_report(query=query, report_source=report_source, sources=sources)) + print(report) diff --git a/docs/docs/faq.md b/docs/docs/faq.md new file mode 100644 index 0000000000000000000000000000000000000000..ab97090206504bb1c2ca3e5ee93118c266cdb7fb --- /dev/null +++ b/docs/docs/faq.md @@ -0,0 +1,34 @@ +# FAQ + +### How do I get started? +It really depends on what you're aiming for. + +If you're looking to connect your AI application to the internet with Tavily tailored API, check out the [Tavily API](https://docs.tavily.com/docs/tavily-api/introductionn) documentation. +If you're looking to build and deploy our open source autonomous research agent GPT Researcher, please see [GPT Researcher](/docs/gpt-researcher/getting-started/introduction) documentation. +You can also check out demos and examples for inspiration [here](/docs/examples/examples). + +### What is GPT Researcher? 
+ +GPT Researcher is a popular open source autonomous research agent that takes care of the tedious task of research for you by scraping, filtering and aggregating 20+ web sources per research task. + +GPT Researcher is built with best practices for leveraging LLMs (prompt engineering, RAG, chains, embeddings, etc.), and is optimized for quick and efficient research. It is also fully customizable and can be tailored to your specific needs. + +To learn more about GPT Researcher, check out the [documentation page](/docs/gpt-researcher/getting-started/introduction). + +### How much does each research run cost? + +A research task using GPT Researcher costs around $0.01 per run (for GPT-4 usage). We're constantly optimizing LLM calls to reduce costs and improve performance. + +### How do you ensure the report is factual and accurate? + +We do our best to ensure that the information we provide is factual and accurate. We do this by using multiple sources, and by using proprietary AI to score and rank the most relevant and accurate information. We also use proprietary AI to filter out irrelevant information and sources. + +Lastly, by using RAG and other techniques, we ensure that the information is relevant to the context of the research task, leading to more accurate generative AI content and reduced hallucinations. + +### What are your plans for the future? + +We're constantly working on improving our products and services. We're currently working on improving our search API together with design partners, and adding more data sources to our search engine. We're also working on improving our research agent GPT Researcher, and adding more features to it while growing our amazing open source community. + +If you're interested in our roadmap or looking to collaborate, check out our [roadmap page](https://trello.com/b/3O7KBePw/gpt-researcher-roadmap). + +Feel free to [contact us](mailto:assafelovic@gmail.com) if you have any further questions or suggestions! \ No newline at end of file diff --git a/docs/docs/gpt-researcher/context/filtering-by-domain.md b/docs/docs/gpt-researcher/context/filtering-by-domain.md new file mode 100644 index 0000000000000000000000000000000000000000..07fe00c2258139b5c1ff32675d4436ef3d2d0dbb --- /dev/null +++ b/docs/docs/gpt-researcher/context/filtering-by-domain.md @@ -0,0 +1,24 @@ +# Filtering by Domain + +If you set Google as a Retriever, you can filter web results by site.
+ +For example, passing `query="site:linkedin.com a python web developer to implement my custom gpt-researcher flow"` to the GPTResearcher class instance will limit the results to linkedin.com. + +> **Step 1** - Set these environment variables with a .env file in the root folder + +```bash +TAVILY_API_KEY= +LANGCHAIN_TRACING_V2=true +LANGCHAIN_API_KEY= +OPENAI_API_KEY= +DOC_PATH=./my-docs +RETRIEVER=google +GOOGLE_API_KEY= +GOOGLE_CX_KEY= +``` + +> **Step 2** - From the project root, run: + +```bash +docker-compose up --build +``` + +> **Step 3** - From the frontend input box on localhost:3000, you can append any Google search filter (such as filtering by domain names) diff --git a/docs/docs/gpt-researcher/context/gptr-hybrid.png b/docs/docs/gpt-researcher/context/gptr-hybrid.png new file mode 100644 index 0000000000000000000000000000000000000000..c3a3027ec7ae0d6e17b342e2322b1a832875154d --- /dev/null +++ b/docs/docs/gpt-researcher/context/gptr-hybrid.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:684394fa4ee4a17caab40113029ebc4d7a3f0ebf240d4b5a50d768e7b947a678 +size 198033 diff --git a/docs/docs/gpt-researcher/context/local-docs.md b/docs/docs/gpt-researcher/context/local-docs.md new file mode 100644 index 0000000000000000000000000000000000000000..60cb3423d04adaa8b8bcd5d4a2da77fa92f341c7 --- /dev/null +++ b/docs/docs/gpt-researcher/context/local-docs.md @@ -0,0 +1,22 @@ +# Research on Local Documents + +## Just Local Docs + +You can instruct the GPT Researcher to run research tasks based on your local documents. Currently supported file formats are: PDF, plain text, CSV, Excel, Markdown, PowerPoint, and Word documents. + +Step 1: Add the env variable `DOC_PATH` pointing to the folder where your documents are located. + +```bash +export DOC_PATH="./my-docs" +``` + +Step 2: + - If you're running the frontend app on localhost:8000, simply select "My Documents" from the "Report Source" Dropdown Options. + - If you're running GPT Researcher with the [PIP package](https://docs.tavily.com/docs/gpt-researcher/gptr/pip-package), pass the `report_source` argument as "local" when you instantiate the `GPTResearcher` class [code sample here](https://docs.gptr.dev/docs/gpt-researcher/context/tailored-research). + +## Local Docs + Web (Hybrid) + +![GPT Researcher hybrid research](./gptr-hybrid.png) + +Check out the blog post on [Hybrid Research](https://docs.gptr.dev/blog/gptr-hybrid) to learn more about how to combine local documents with web research. diff --git a/docs/docs/gpt-researcher/context/tailored-research.md b/docs/docs/gpt-researcher/context/tailored-research.md new file mode 100644 index 0000000000000000000000000000000000000000..4efbff027d3a4cf97afb070216d8cb726415e4bb --- /dev/null +++ b/docs/docs/gpt-researcher/context/tailored-research.md @@ -0,0 +1,147 @@ +# Tailored Research + +The GPT Researcher package allows you to tailor the research to your needs, such as researching on specific sources (URLs) or local documents, and even to specify the agent prompt instruction upon which the research is conducted. + +### Research on Specific Sources 📚 + +You can specify the sources you want the GPT Researcher to research on by providing a list of URLs. The GPT Researcher will then conduct research on the provided sources via `source_urls`.
+ +If you want GPT Researcher to perform additional research outside of the URLs you provided, i.e., conduct research on various other websites that it finds suitable for the query/sub-query, you can set the parameter `complement_source_urls` as `True`. The default value of `False` will only scour the websites you provide via `source_urls`. + + +```python +from gpt_researcher import GPTResearcher +import asyncio + +async def get_report(query: str, report_type: str, sources: list) -> str: + researcher = GPTResearcher(query=query, report_type=report_type, source_urls=sources, complement_source_urls=False) + await researcher.conduct_research() + report = await researcher.write_report() + return report + +if __name__ == "__main__": + query = "What are the biggest trends in AI lately?" + report_type = "research_report" + sources = [ + "https://en.wikipedia.org/wiki/Artificial_intelligence", + "https://www.ibm.com/think/insights/artificial-intelligence-trends", + "https://www.forbes.com/advisor/business/ai-statistics" + ] + report = asyncio.run(get_report(query=query, report_type=report_type, sources=sources)) + print(report) +``` + +### Specify Agent Prompt 📝 + +You can specify the agent prompt instruction upon which the research is conducted. This allows you to guide the research in a specific direction and tailor the report layout. +Simply pass the prompt as the `query` argument to the `GPTResearcher` class, together with the "custom_report" `report_type`. + +```python +from gpt_researcher import GPTResearcher +import asyncio + +async def get_report(prompt: str, report_type: str) -> str: + researcher = GPTResearcher(query=prompt, report_type=report_type) + await researcher.conduct_research() + report = await researcher.write_report() + return report + +if __name__ == "__main__": + report_type = "custom_report" + prompt = "Research the latest advancements in AI and provide a detailed report in APA format including sources." + + report = asyncio.run(get_report(prompt=prompt, report_type=report_type)) + print(report) +``` + +### Research on Local Documents 📄 +You can instruct the GPT Researcher to research on local documents by providing the path to those documents. Currently supported file formats are: PDF, plain text, CSV, Excel, Markdown, PowerPoint, and Word documents. + +*Step 1*: Add the env variable `DOC_PATH` pointing to the folder where your documents are located. + +For example: + +```bash +export DOC_PATH="./my-docs" +``` + +*Step 2*: When you create an instance of the `GPTResearcher` class, pass the `report_source` argument as `"local"`. + +GPT Researcher will then conduct research on the provided documents. + +```python +from gpt_researcher import GPTResearcher +import asyncio + +async def get_report(query: str, report_source: str) -> str: + researcher = GPTResearcher(query=query, report_source=report_source) + await researcher.conduct_research() + report = await researcher.write_report() + return report + +if __name__ == "__main__": + query = "What can you tell me about myself based on my documents?" + report_source = "local" # "local" or "web" + + report = asyncio.run(get_report(query=query, report_source=report_source)) + print(report) +``` + +### Hybrid Research 🔄 +You can combine the above methods to conduct hybrid research. For example, you can instruct the GPT Researcher to research on both web sources and local documents. +Simply provide the sources and set the `report_source` argument as `"hybrid"` and watch the magic happen. + +Please note!
You should set the proper retrievers for the web sources and doc path for local documents for this to work. +To learn more about retrievers check out the [Retrievers](https://docs.gptr.dev/docs/gpt-researcher/search-engines/retrievers) documentation. + + +### Research on LangChain Documents 🦜️🔗 +You can instruct the GPT Researcher to research on a list of langchain document instances. + +For example: + +```python +from langchain_core.documents import Document +from typing import List, Dict +from gpt_researcher import GPTResearcher +from langchain_postgres.vectorstores import PGVector +from langchain_openai import OpenAIEmbeddings +from sqlalchemy import create_engine +import asyncio + + + +CONNECTION_STRING = 'postgresql://someuser:somepass@localhost:5432/somedatabase' + +def get_retriever(collection_name: str, search_kwargs: Dict[str, str]): + engine = create_engine(CONNECTION_STRING) + embeddings = OpenAIEmbeddings() + + index = PGVector.from_existing_index( + use_jsonb=True, + embedding=embeddings, + collection_name=collection_name, + connection=engine, + ) + + return index.as_retriever(search_kwargs=search_kwargs) + + +async def get_report(query: str, report_type: str, report_source: str, documents: List[Document]) -> str: + researcher = GPTResearcher(query=query, report_type=report_type, report_source=report_source, documents=documents) + await researcher.conduct_research() + report = await researcher.write_report() + return report + +if __name__ == "__main__": + query = "What can you tell me about blue cheese based on my documents?" + report_type = "research_report" + report_source = "langchain_documents" + + # using a LangChain retriever to get all the documents regarding cheese + # https://api.python.langchain.com/en/latest/retrievers/langchain_core.retrievers.BaseRetriever.html#langchain_core.retrievers.BaseRetriever.invoke + langchain_retriever = get_retriever("cheese_collection", { "k": 3 }) + documents = langchain_retriever.invoke("All the documents about cheese") + report = asyncio.run(get_report(query=query, report_type=report_type, report_source=report_source, documents=documents)) + print(report) +``` diff --git a/docs/docs/gpt-researcher/context/vector-stores.md b/docs/docs/gpt-researcher/context/vector-stores.md new file mode 100644 index 0000000000000000000000000000000000000000..3e4725f06d38ec2474a43e6ecd9bb1813f551cda --- /dev/null +++ b/docs/docs/gpt-researcher/context/vector-stores.md @@ -0,0 +1,155 @@ +# Vector Stores + +The GPT Researcher package allows you to integrate with existing langchain vector stores that have been populated. +For a complete list of supported langchain vector stores, please refer to this [link](https://python.langchain.com/v0.2/docs/integrations/vectorstores/). + +You can create a set of embeddings and langchain documents and store them in any supported vector store of your choosing. +GPT-Researcher will work with any langchain vector store that implements the `asimilarity_search` method. + +**If you want to use the existing knowledge in your vector store, make sure to set `report_source="langchain_vectorstore"`. 
Any other settings will add additional information from scraped data and might contaminate your vectordb (see _How to add scraped data to your vector store_ for more context).** + +## Faiss +```python +from gpt_researcher import GPTResearcher + +from langchain.text_splitter import CharacterTextSplitter +from langchain_openai import OpenAIEmbeddings +from langchain_community.vectorstores import FAISS +from langchain_core.documents import Document + +# excerpt taken from - https://paulgraham.com/wealth.html +essay = """ +May 2004 + +(This essay was originally published in Hackers & Painters.) + +If you wanted to get rich, how would you do it? I think your best bet would be to start or join a startup. +That's been a reliable way to get rich for hundreds of years. The word "startup" dates from the 1960s, +but what happens in one is very similar to the venture-backed trading voyages of the Middle Ages. + +Startups usually involve technology, so much so that the phrase "high-tech startup" is almost redundant. +A startup is a small company that takes on a hard technical problem. + +Lots of people get rich knowing nothing more than that. You don't have to know physics to be a good pitcher. +But I think it could give you an edge to understand the underlying principles. Why do startups have to be small? +Will a startup inevitably stop being a startup as it grows larger? +And why do they so often work on developing new technology? Why are there so many startups selling new drugs or computer software, +and none selling corn oil or laundry detergent? + + +The Proposition + +Economically, you can think of a startup as a way to compress your whole working life into a few years. +Instead of working at a low intensity for forty years, you work as hard as you possibly can for four. +This pays especially well in technology, where you earn a premium for working fast. + +Here is a brief sketch of the economic proposition. If you're a good hacker in your mid twenties, +you can get a job paying about $80,000 per year. So on average such a hacker must be able to do at +least $80,000 worth of work per year for the company just to break even. You could probably work twice +as many hours as a corporate employee, and if you focus you can probably get three times as much done in an hour.[1] +You should get another multiple of two, at least, by eliminating the drag of the pointy-haired middle manager who +would be your boss in a big company. Then there is one more multiple: how much smarter are you than your job +description expects you to be? Suppose another multiple of three. Combine all these multipliers, +and I'm claiming you could be 36 times more productive than you're expected to be in a random corporate job.[2] +If a fairly good hacker is worth $80,000 a year at a big company, then a smart hacker working very hard without +any corporate bullshit to slow him down should be able to do work worth about $3 million a year. +... +... +... +""" + +document = [Document(page_content=essay)] +text_splitter = CharacterTextSplitter(chunk_size=200, chunk_overlap=30, separator="\n") +docs = text_splitter.split_documents(documents=document) + +vector_store = FAISS.from_documents(docs, OpenAIEmbeddings()) + +query = """ + Summarize the essay into 3 or 4 succinct sections. + Make sure to include key points regarding wealth creation. + + Include some recommendations for entrepreneurs in the conclusion.
+""" + + +# Create an instance of GPTResearcher +researcher = GPTResearcher( + query=query, + report_type="research_report", + report_source="langchain_vectorstore", + vector_store=vector_store, +) + +# Conduct research and write the report +await researcher.conduct_research() +report = await researcher.write_report() +``` + + +## PGVector +```python +from gpt_researcher import GPTResearcher +from langchain_postgres.vectorstores import PGVector +from langchain_openai import OpenAIEmbeddings + +CONNECTION_STRING = 'postgresql://someuser:somepass@localhost:5432/somedatabase' + + +# assuming the vector store exists and contains the relevent documents +# also assuming embeddings have been or will be generated +vector_store = PGVector.from_existing_index( + use_jsonb=True, + embedding=OpenAIEmbeddings(), + collection_name='some collection name', + connection=CONNECTION_STRING, + async_mode=True, +) + +query = """ + Create a short report about apples. + Include a section about which apples are considered best + during each season. +""" + +# Create an instance of GPTResearcher +researcher = GPTResearcher( + query=query, + report_type="research_report", + report_source="langchain_vectorstore", + vector_store=vector_store, +) + +# Conduct research and write the report +await researcher.conduct_research() +report = await researcher.write_report() +``` +## Adding Scraped Data to your vector store + +In some cases in which you want to store the scraped data and documents into your own vector store for future usages, GPT-Researcher also allows you to do so seamlessly just by inputting your vector store (make sure to set `report_source` value to something other than `langchain_vectorstore`) + +```python +from gpt_researcher import GPTResearcher + +from langchain_community.vectorstores import InMemoryVectorStore +from langchain_openai import OpenAIEmbeddings + +vector_store = InMemoryVectorStore(embedding=OpenAIEmbeddings()) + +query = "The best LLM" + +# Create an instance of GPTResearcher +researcher = GPTResearcher( + query=query, + report_type="research_report", + report_source="web", + vector_store=vector_store, +) + +# Conduct research, the context will be chunked and stored in the vector_store +await researcher.conduct_research() + +# Query the 5 most relevant context in our vector store +related_contexts = await vector_store.asimilarity_search("GPT-4", k = 5) +print(related_contexts) +print(len(related_contexts)) #Should be 5 +``` diff --git a/docs/docs/gpt-researcher/frontend/frontend.md b/docs/docs/gpt-researcher/frontend/frontend.md new file mode 100644 index 0000000000000000000000000000000000000000..afbd79b294674cb7d259eab3001fef3e4bded4d5 --- /dev/null +++ b/docs/docs/gpt-researcher/frontend/frontend.md @@ -0,0 +1,124 @@ +# Frontend Application + +This frontend project aims to enhance the user experience of GPT Researcher, providing an intuitive and efficient interface for automated research. It offers two deployment options to suit different needs and environments. + +View a Product Tutorial here: [GPT-Researcher Frontend Tutorial](https://www.youtube.com/watch?v=hIZqA6lPusk) + + +## NextJS Frontend App + +The React app (located in the `frontend` directory) is our Frontend 2.0 which we hope will enable us to display the robustness of the backend on the frontend, as well. + +It comes with loads of added features, such as: + - a drag-n-drop user interface for uploading and deleting files to be used as local documents by GPTResearcher. + - a GUI for setting your GPTR environment variables. 
+ - the ability to trigger the multi_agents flow via the Backend Module or Langgraph Cloud Host (currently in closed beta). + - stability fixes + - and more coming soon! + +### Run the NextJS React App with Docker + +> **Step 1** - [Install Docker](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started-with-docker) + +> **Step 2** - Clone the '.env.example' file, add your API Keys to the cloned file and save the file as '.env' + +> **Step 3** - Within the docker-compose file comment out services that you don't want to run with Docker. + +```bash +docker-compose up --build +``` + +If that doesn't work, try running it without the dash: +```bash +docker compose up --build +``` + +> **Step 4** - By default, if you haven't uncommented anything in your docker-compose file, this flow will start 2 processes: + - the Python server running on localhost:8000 + - the React app running on localhost:3000 + +Visit localhost:3000 on any browser and enjoy researching! + + +## Other Options: 1: Static Frontend (FastAPI) + +A lightweight solution using FastAPI to serve static files. + +#### Prerequisites +- Python 3.11+ +- pip + +#### Setup and Running + +1. Install required packages: + ``` + pip install -r requirements.txt + ``` + +2. Start the server: + ``` + python -m uvicorn main:app + ``` + +3. Access at `http://localhost:8000` + +#### Demo + + + +## Yet Another Option: Running NextJS Frontend via CLI + +A more robust solution with enhanced features and performance. + +#### Prerequisites +- Node.js (v18.17.0 recommended) +- npm + +#### Setup and Running + +1. Navigate to NextJS directory: + ``` + cd nextjs + ``` + +2. Set up Node.js: + ``` + nvm install 18.17.0 + nvm use v18.17.0 + ``` + +3. Install dependencies: + ``` + npm install --legacy-peer-deps + ``` + +4. Start development server: + ``` + npm run dev + ``` + +5. Access at `http://localhost:3000` + +Note: Requires backend server on `localhost:8000` as detailed in option 1. + +#### Demo + + +## Choosing an Option + +- Static Frontend: Quick setup, lightweight deployment. +- NextJS Frontend: Feature-rich, scalable, better performance and SEO. + +For production, NextJS is recommended. + +## Frontend Features + +Our frontend enhances GPT-Researcher by providing: + +1. Intuitive Research Interface: Streamlined input for research queries. +2. Real-time Progress Tracking: Visual feedback on ongoing research tasks. +3. Interactive Results Display: Easy-to-navigate presentation of findings. +4. Customizable Settings: Adjust research parameters to suit specific needs. +5. Responsive Design: Optimal experience across various devices. + +These features aim to make the research process more efficient and user-friendly, complementing GPT-Researcher's powerful agent capabilities. \ No newline at end of file diff --git a/docs/docs/gpt-researcher/frontend/logs.md b/docs/docs/gpt-researcher/frontend/logs.md new file mode 100644 index 0000000000000000000000000000000000000000..146ba07861bbb6758154d10d883e98046e000e08 --- /dev/null +++ b/docs/docs/gpt-researcher/frontend/logs.md @@ -0,0 +1,170 @@ +# Log Files + +This document explains how to interpret the log files generated for each report. These logs provide a detailed record of the research process, from initial task planning to the gathering of information, and finally, the report writing process. Reports may change over time as new features are developed. + +## Log File Overview + +The log file is a JSON file that contains a list of events that happened during the research process.
Each event is an object with a timestamp, type, and data. The data contains the specific information about the event. + +You can find the log file in the `outputs` folder. + +Or you can access the log file from the report page itself by clicking the "Download Logs" button. + +For developers, there is an additional `logs` folder that may be useful. See description below for more details. + +## Key Components: + +* `timestamp`: The timestamp is in the format `YYYY-MM-DDTHH:MM:SS.ffffff` which is an ISO format. The main timestamp is for the generation of the file itself. The timestamps for the events are when each specific event happened during the research process. +* `events`: This is an array containing all the logged events during the research task. Each event object has the following structure. +* `timestamp`: The specific time when the event occurred, allowing you to follow the sequence of actions. +* `type`: This will always be "event" for now. +* `data`: Contains specific information about the event. Includes: +* `type`: This indicates the general kind of event (e.g., "logs"). +* `content`: A descriptor of what the tool is doing (e.g., "starting\_research", "running\_subquery\_research", "scraping\_content"). +* `output`: A more detailed message, which often includes visual indicators (emojis), that is sent to the user when the tool performs the task. +* `metadata`: Additional data related to the event. This can be `null` or contain an array of relevant information like URLs. + +## Types of Events & Their Significance +Here's a complete breakdown of all the unique `content` types and what they mean. This is a comprehensive list of all the different actions the research tool will perform. +1. **`starting_research`**: +* Indicates that the research process has begun for a given task. +* `output`: Includes the text of the research query. +2. **`agent_generated`**: +* Indicates which agent was selected for this task. +* `output`: Will show the name of the agent. +3. **`planning_research`**: +* Shows the tool is initially browsing to understand the scope of the request and start planning. +* The `output` indicates the tool is either browsing or doing initial planning. +4. **`subqueries`**: +* Indicates that the tool has created subqueries that it will use for research. +* `output`: Lists out all of the subqueries that the tool will be running to perform the research. +* `metadata`: An array of strings that contain the subqueries to be run. +5. **`running_subquery_research`**: +* Indicates that research on a specific subquery is being performed. +* `output`: Shows the specific subquery being run. +6. **`added_source_url`**: +* Signifies a URL that was identified as a relevant source of information. +* `output`: Provides the URL with a checkmark emoji to indicate success. +* `metadata`: Contains the actual URL added. +7. **`researching`**: +* Indicates the tool is actively searching across multiple sources for information. +* `output`: A general message indicating research across multiple sources is happening. +8. **`scraping_urls`**: +* Shows the tool is beginning to scrape content from a group of URLs. +* `output`: Indicates how many URLs the tool will be scraping from. +9. **`scraping_content`**: +* Indicates the tool successfully scraped the content from the URLs. +* `output`: Shows the number of pages that have been successfully scraped. +10. **`scraping_images`**: +* Signifies that images were identified and selected during the scraping process.
+* `output`: Shows the number of new images selected and the total images found +* `metadata`: An array containing URLs of the selected images. +11. **`scraping_complete`**: +* Indicates that the scraping process is complete for the URLs. +* `output`: A message stating that the scraping process is complete +12. **`fetching_query_content`**: +* Indicates that the tool is fetching content based on a specific query. +* `output`: The specific query for which content is being fetched +13. **`subquery_context_window`**: +* Indicates the tool is creating a context window for a given subquery to help with more detailed research. +* `output`: A message stating the context window for the subquery is created. +14. **`research_step_finalized`**: +* Indicates that the research portion of a step is finalized. +* `output`: A message stating that the research is complete. +15. **`generating_subtopics`**: +* Signifies that the tool is generating subtopics to guide the report. +* `output`: A message indicating that the tool is generating subtopics. +16. **`subtopics_generated`**: +* Indicates that subtopics have been generated. +* `output`: A message that subtopics have been generated. +17. **`writing_introduction`**: +* Indicates the tool is beginning to write the introduction to the report. +* `output`: A message to the user that the introduction writing has started. +18. **`introduction_written`**: +* Indicates the introduction to the report is finished +* `output`: A message to the user that the introduction writing is complete +19. **`generating_draft_sections`**: +* Shows that the tool is generating draft sections for the report. +* `output`: A message that the report is generating draft sections. +20. **`draft_sections_generated`**: +* Indicates the draft sections of the report are generated. +* `output`: A message to the user that the draft sections have been generated. +21. **`fetching_relevant_written_content`**: +* Indicates the tool is fetching relevant written content for the report. +* `output`: A message to the user that relevant content is being fetched +22. **`writing_report`**: +* Indicates that the tool is starting to compile the research into a report. +* `output`: A message to the user that the report generation has started. +23. **`report_written`**: +* Signifies that the report generation is complete. +* `output`: A message that the report generation is finished. +24. **`relevant_contents_context`**: +* Indicates that a context window for relevant content has been created. +* `output`: A message indicating a context window for relevant content has been created. +25. **`writing_conclusion`**: +* Indicates the tool has started writing the conclusion for the report +* `output`: A message to the user that the conclusion is being written +26. **`conclusion_written`**: +* Indicates the conclusion of the report has been written +* `output`: A message to the user that the conclusion has been written + +## How to Use the Logs + +* **Troubleshooting:** If the research results are unexpected, the log files can help you understand the exact steps the tool took, including the queries used, the sources it visited, and how the report was generated. +* **Transparency:** The logs provide transparency into the research process. You can see exactly which URLs were visited, which images were selected, and how the report was built. +* **Understanding the Process**: The logs will provide an overview of what the tool does and what each of the steps look like. 
+* **Reproducibility:** The log files allow users to trace the exact process. + +## Example Usage +By looking at the timestamps, you can see the flow of the research task. The logs will show you the subqueries used by the tool to approach the main query, all the URLs used, if images were selected for the research, and all the steps the tool took to generate the report. + +## Logs for Developers +In addition to the user-facing log files (detailed and summary reports), the application also generates two types of log files specifically for developers: +1. A `.log` file which is a basic log file format for logging events as they occur +2. A `.json` file which is more structured +Find the logs in the `logs` folder. + +### Basic Log File (.log) + +* **Format:** Plain text format. Each line represents a log entry. +* **Content:** + * Timestamps with millisecond precision. + * Log level: Usually `INFO`, but could include `DEBUG`, `WARNING`, or `ERROR` in a more complex setup. + * Module name (e.g., "research"). + * Descriptive messages about various processes. + * Includes data about: + * Start and end of research tasks + * Web searches being performed + * Planning of the research + * Subqueries generated and their results + * The sizes of scraped data + * The size of content found from subqueries + * The final combined size of all context found +* **Use Cases for Developers:** + * **Real-time Monitoring:** Can be used to monitor the tool's activity in real time. + * **Debugging:** Helpful for pinpointing issues by seeing the chronological flow of operations, the size of content collected, etc. + * **Performance Analysis:** Timestamps can help in identifying bottlenecks by measuring how long certain operations take. + * **High-level overview**: Allows developers to easily see which steps of the tool were performed, and some basic information like sizes of collected content. +* **Key Differences from User Logs:** + * Less structured, more for developers to review in real-time. + * Contains technical information not usually relevant to a non-developer user. + * Does not have emojis or simplified language. + * No information on the images collected + +### JSON Log File (.json) + +* **Format**: Structured JSON format +* **Content**: + * Timestamps, as in all log files + * `type` field that can be: + * `sub_query`: which contains the subquery string along with `scraped_data_size` + * `content_found`: which includes the `sub_query` and the `content_size` + * A `content` field which gives a snapshot of the overall research and can contain the final context and sources found from the research for that task +* **Use Cases for Developers**: + * **Detailed Analysis**: Allows developers to view specific details of how the tool is running, particularly related to the subqueries and the results of the research. + * **Process Understanding**: Developers can see the different subqueries run and how much content each generated which can lead to better debugging and understanding of the tool. + * **Data Inspection**: Can be useful for reviewing the generated queries and content sizes. +* **Key Differences from User Logs**: + * Highly structured and focused on subquery execution, and the results of this process, specifically the sizes of collected information. + * Does not contain simplified language, emojis, or high-level explanations. + * Does not contain information on the overall context or the images collected, it mainly focuses on the subquery process. 
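+ +## Example: Reading a Log File Programmatically + +As a rough illustration of how the fields above fit together, here is a minimal Python sketch (not part of the repository) that walks the `events` array of a user-facing JSON log, prints a simple timeline, and collects the source URLs recorded by `added_source_url` events. The filename is a placeholder for whichever log file your research task produced in the `outputs` folder: + +```python +import json +from pathlib import Path + +# Placeholder path; point this at the JSON log generated for your task +log_path = Path("outputs") / "task_example.json" + +with log_path.open() as f: +    log = json.load(f) + +source_urls = [] +for event in log.get("events", []): +    data = event.get("data", {}) +    # Print a simple timeline of what the tool did and when +    print(f'{event.get("timestamp")}  {data.get("content")}') +    # Collect the URLs recorded by "added_source_url" events +    if data.get("content") == "added_source_url" and data.get("metadata"): +        source_urls.append(data["metadata"]) + +print("\nSources used in this research task:") +for url in source_urls: +    print(f"- {url}") +```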
diff --git a/docs/docs/gpt-researcher/frontend/playing-with-webhooks.md b/docs/docs/gpt-researcher/frontend/playing-with-webhooks.md new file mode 100644 index 0000000000000000000000000000000000000000..f532f49fba5f1b24f73d1ce55439147fc7eb2582 --- /dev/null +++ b/docs/docs/gpt-researcher/frontend/playing-with-webhooks.md @@ -0,0 +1,23 @@ +# Playing with Webhooks + +The GPTR Frontend is powered by Webhooks streaming back from the Backend. This allows for real-time updates on the status of your research tasks, as well as the ability to interact with the Backend directly from the Frontend. + + +## Inspecting Webhooks + +When running reports via the frontend, you can inspect the websocket messages in the Network Tab. + +Here's how: + +![image](https://github.com/user-attachments/assets/15fcb5a4-77ea-4b3b-87d7-55d4b6f80095) + + +### Am I polling the right URL? + +If you're concerned that your frontend isn't hitting the right API Endpoint, you can check the URL in the Network Tab. + +Click into the WS request & go to the "Headers" tab + +![image](https://github.com/user-attachments/assets/dbd58c1d-3506-411a-852b-e1b133b6f5c8) + +For debugging, have a look at the getHost function. \ No newline at end of file diff --git a/docs/docs/gpt-researcher/getting-started/cli.md b/docs/docs/gpt-researcher/getting-started/cli.md new file mode 100644 index 0000000000000000000000000000000000000000..4e9451936c5e9173cfc1db119a4d5f028e936eea --- /dev/null +++ b/docs/docs/gpt-researcher/getting-started/cli.md @@ -0,0 +1,81 @@ +# Run with CLI + +This command-line interface (CLI) tool allows you to generate research reports using the GPTResearcher class. It provides an easy way to conduct research on various topics and generate different types of reports. + +## Installation + +1. Clone the repository: +   ``` +   git clone https://github.com/yourusername/gpt-researcher.git +   cd gpt-researcher +   ``` + +2. Install the required dependencies: +   ``` +   pip install -r requirements.txt +   ``` + +3. Set up your environment variables: +   Create a `.env` file in the project root and add your API keys or other necessary configurations. + +## Usage + +The basic syntax for using the CLI is: + +``` +python cli.py "<query>" --report_type <report_type> [--tone <tone>] +``` + +### Arguments + +- `query` (required): The research query you want to investigate. +- `--report_type` (required): The type of report to generate. Options include: +  - `research_report`: Summary - Short and fast (~2 min) +  - `detailed_report`: Detailed - In depth and longer (~5 min) +  - `resource_report` +  - `outline_report` +  - `custom_report` +  - `subtopic_report` +- `--tone` (optional): The tone of the report. Defaults to 'objective'. Options include: +  - `objective`: Impartial and unbiased presentation +  - `formal`: Academic standards with sophisticated language +  - `analytical`: Critical evaluation and examination +  - `persuasive`: Convincing viewpoint +  - `informative`: Clear and comprehensive information +  - `explanatory`: Clarifying complex concepts +  - `descriptive`: Detailed depiction +  - `critical`: Judging validity and relevance +  - `comparative`: Juxtaposing different theories +  - `speculative`: Exploring hypotheses +  - `reflective`: Personal insights +  - `narrative`: Story-based presentation +  - `humorous`: Light-hearted and engaging +  - `optimistic`: Highlighting positive aspects +  - `pessimistic`: Focusing on challenges + +## Examples + +1. Generate a quick research report on climate change: +   ``` +   python cli.py "What are the main causes of climate change?"
--report_type research_report + ``` + +2. Create a detailed report on artificial intelligence with an analytical tone: + ``` + python cli.py "The impact of artificial intelligence on job markets" --report_type detailed_report --tone analytical + ``` + +3. Generate an outline report on renewable energy with a persuasive tone: + ``` + python cli.py "Renewable energy sources and their potential" --report_type outline_report --tone persuasive + ``` + +## Output + +The generated report will be saved as a Markdown file in the `outputs` directory. The filename will be a unique UUID. + +## Note + +- The execution time may vary depending on the complexity of the query and the type of report requested. +- Make sure you have the necessary API keys and permissions set up in your `.env` file for the tool to function correctly. +- All tone options should be provided in lowercase. \ No newline at end of file diff --git a/docs/docs/gpt-researcher/getting-started/getting-started-with-docker.md b/docs/docs/gpt-researcher/getting-started/getting-started-with-docker.md new file mode 100644 index 0000000000000000000000000000000000000000..4e26043a231a14cf39b48d46bb063c8ab0aa6c1d --- /dev/null +++ b/docs/docs/gpt-researcher/getting-started/getting-started-with-docker.md @@ -0,0 +1,28 @@ +# Docker: Quickstart + +> **Step 1** - Install & Open Docker Desktop + +Follow instructions at https://www.docker.com/products/docker-desktop/ + + +> **Step 2** - [Follow this flow](https://www.youtube.com/watch?v=x1gKFt_6Us4) + +This mainly includes cloning the '.env.example' file, adding your API Keys to the cloned file and saving the file as '.env' + +> **Step 3** - Within root, run with Docker. + +```bash +docker-compose up --build +``` + +If that doesn't work, try running it without the dash: +```bash +docker compose up --build +``` + +> **Step 4** - By default, if you haven't uncommented anything in your docker-compose file, this flow will start 2 processes: + - the Python server running on localhost:8000 + - the React app running on localhost:3000 + +Visit localhost:3000 on any browser and enjoy researching! + diff --git a/docs/docs/gpt-researcher/getting-started/getting-started.md b/docs/docs/gpt-researcher/getting-started/getting-started.md new file mode 100644 index 0000000000000000000000000000000000000000..a77847819e31cac3ef2a7fa59b73446745327a00 --- /dev/null +++ b/docs/docs/gpt-researcher/getting-started/getting-started.md @@ -0,0 +1,104 @@ +# Getting Started + +> **Step 0** - Install Python 3.11 or later. [See here](https://www.tutorialsteacher.com/python/install-python) for a step-by-step guide. + +> **Step 1** - Download the project and navigate to its directory + +```bash +$ git clone https://github.com/assafelovic/gpt-researcher.git +$ cd gpt-researcher +``` + +> **Step 3** - Set up API keys using two methods: exporting them directly or storing them in a `.env` file. + +For Linux/Temporary Windows Setup, use the export method: + +```bash +export OPENAI_API_KEY={Your OpenAI API Key here} +export TAVILY_API_KEY={Your Tavily API Key here} +``` + +For a more permanent setup, create a `.env` file in the current `gpt-researcher` directory and input the env vars (without `export`). + +- For LLM provider, we recommend **[OpenAI GPT](https://platform.openai.com/docs/guides/gpt)**, but you can use any other LLM model (including open sources). To learn how to change the LLM model, please refer to the [documentation](https://docs.gptr.dev/docs/gpt-researcher/llms/llms) page. 
+- For web search API, we recommend **[Tavily Search API](https://app.tavily.com)**, but you can also use other search APIs of your choice by changing the search provider in config/config.py to `duckduckgo`, `google`, `bing`, `searchapi`, `serper`, `searx` and more. Then add the corresponding env API key. + +## Quickstart + +> **Step 1** - Install dependencies + +```bash +$ pip install -r requirements.txt +``` + +> **Step 2** - Run the agent with FastAPI + +```bash +$ uvicorn main:app --reload +``` + +> **Step 3** - Go to http://localhost:8000 on any browser and enjoy researching! + +## Using Virtual Environment or Poetry +Select either based on your familiarity with each: + +### Virtual Environment + +#### *Establishing the Virtual Environment with Activate/Deactivate configuration* + +Create a virtual environment using the `venv` package with an environment name of your choice, for example, `env`. Execute the following command in the PowerShell/CMD terminal: + +```bash +python -m venv env +``` + +To activate the virtual environment, use the following activation script in PowerShell/CMD terminal: + +```bash +.\env\Scripts\activate +``` + +To deactivate the virtual environment, run the following deactivation script in PowerShell/CMD terminal: + +```bash +deactivate +``` + +#### *Install the dependencies for a Virtual environment* + +After activating the `env` environment, install dependencies using the `requirements.txt` file with the following command: + +```bash +python -m pip install -r requirements.txt +```
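+ +Note that the activation script above is the Windows (PowerShell/CMD) one. On macOS or Linux, the same virtual environment is activated with: + +```bash +source env/bin/activate +```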
+ +### Poetry + +#### *Establishing the Poetry dependencies and virtual environment with Poetry version `~1.7.1`* + +Install project dependencies and simultaneously create a virtual environment for the specified project. By executing this command, Poetry reads the project's "pyproject.toml" file to determine the required dependencies and their versions, ensuring a consistent and isolated development environment. The virtual environment allows for a clean separation of project-specific dependencies, preventing conflicts with system-wide packages and enabling more straightforward dependency management throughout the project's lifecycle. + +```bash +poetry install +``` + +#### *Activate the virtual environment associated with a Poetry project* + +By running this command, the user enters a shell session within the isolated environment associated with the project, providing a dedicated space for development and execution. This virtual environment ensures that the project dependencies are encapsulated, avoiding conflicts with system-wide packages. Activating the Poetry shell is essential for seamlessly working on a project, as it ensures that the correct versions of dependencies are used and provides a controlled environment conducive to efficient development and testing. + +```bash +poetry shell +``` + +### *Run the app* +> Launch the FastAPI application agent on a *Virtual Environment or Poetry* setup by executing the following command: +```bash +python -m uvicorn main:app --reload +``` +> Visit http://localhost:8000 in any web browser and explore your research! + +
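+ +For reference, a minimal `.env` file for the default OpenAI + Tavily setup only needs the two keys mentioned above (the values here are placeholders): + +```bash +OPENAI_API_KEY=your-openai-api-key +TAVILY_API_KEY=your-tavily-api-key +```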
+ + diff --git a/docs/docs/gpt-researcher/getting-started/how-to-choose.md b/docs/docs/gpt-researcher/getting-started/how-to-choose.md new file mode 100644 index 0000000000000000000000000000000000000000..d3079766997d6d1b7ee2975d25a1ea26d85830d7 --- /dev/null +++ b/docs/docs/gpt-researcher/getting-started/how-to-choose.md @@ -0,0 +1,128 @@ +# How to Choose + +GPT Researcher is a powerful autonomous research agent designed to enhance and streamline your research processes. Whether you're a developer looking to integrate research capabilities into your project or an end-user seeking a comprehensive research solution, GPT Researcher offers flexible options to meet your needs. + +We envision a future where AI agents collaborate to complete complex tasks, with research being a critical step in the process. GPT Researcher aims to be your go-to agent for any research task, regardless of complexity. It can be easily integrated into existing agent workflows, eliminating the need to create your own research agent from scratch. + +## Options + +GPT Researcher offers multiple ways to leverage its capabilities: + +Logo +

+ +1. **GPT Researcher PIP agent**: Ideal for integrating GPT Researcher into your existing projects and workflows. +2. **Backend**: A backend service to interact with the frontend user interfaces, offering advanced features like detailed reports. +3. **Multi Agent System**: An advanced setup using LangGraph, offering the most comprehensive research capabilities. +4. **Frontend**: Several front-end solutions depending on your needs, including a simple HTML/JS version and a more advanced NextJS version. + +## Usage Options + +### 1. PIP Package + +The PIP package is ideal for leveraging GPT Researcher as an agent in your preferred environment and code. + +**Pros:** +- Easy integration into existing projects +- Flexible usage in multi-agent systems, chains, or workflows +- Optimized for production performance + +**Cons:** +- Requires some coding knowledge +- May need additional setup for advanced features + +**Installation:** +``` +pip install gpt-researcher +``` + +**System Requirements:** +- Python 3.10+ +- pip package manager + +**Learn More:** [PIP Documentation](https://docs.gptr.dev/docs/gpt-researcher/gptr/pip-package) + +### 2. End-to-End Application + +For a complete out-of-the-box experience, including a sleek frontend, you can clone our repository. + +**Pros:** +- Ready-to-use frontend and backend services +- Includes advanced use cases like detailed report generation +- Optimal user experience + +**Cons:** +- Less flexible than the PIP package for custom integrations +- Requires setting up the entire application + +**Getting Started:** +1. Clone the repository: `git clone https://github.com/assafelovic/gpt-researcher.git` +2. Follow the [installation instructions](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started) + +**System Requirements:** +- Git +- Python 3.10+ +- Node.js and npm (for frontend) + +**Advanced Usage Example:** [Detailed Report Implementation](https://github.com/assafelovic/gpt-researcher/tree/master/backend/report_type/detailed_report) + +### 3. Multi Agent System with LangGraph + +We've collaborated with LangChain to support multi-agents with LangGraph and GPT Researcher, offering the most complex and comprehensive version of GPT Researcher. + +**Pros:** +- Very detailed, customized research reports +- Inner AI agent loops and reasoning + +**Cons:** +- More expensive and time-consuming +- Heavyweight for production use + +This version is recommended for local, experimental, and educational use. We're working on providing a lighter version soon! + +**System Requirements:** +- Python 3.10+ +- LangGraph library + +**Learn More:** [GPT Researcher x LangGraph](https://docs.gptr.dev/docs/gpt-researcher/multi_agents/langgraph) + +## Comparison Table + +| Feature | PIP Package | End-to-End Application | Multi Agent System | +|---------|-------------|------------------------|---------------------| +| Ease of Integration | High | Medium | Low | +| Customization | High | Medium | High | +| Out-of-the-box UI | No | Yes | No | +| Complexity | Low | Medium | High | +| Best for | Developers | End-users | Researchers/Experimenters | + +Please note that all options have been optimized and refined for production use. + +## Deep Dive + +To learn more about each of the options, check out these docs and code snippets: + +1. **PIP Package**: + - Install: `pip install gpt-researcher` + - [Integration guide](https://docs.gptr.dev/docs/gpt-researcher/gptr/pip-package) + +2. 
**End-to-End Application**: + - Clone the repository: `git clone https://github.com/assafelovic/gpt-researcher.git` + - [Installation instructions](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started) + +3. **Multi-Agent System**: + - [Multi-Agents code](https://github.com/assafelovic/gpt-researcher/tree/master/multi_agents) + - [LangGraph documentation](https://docs.gptr.dev/docs/gpt-researcher/multi_agents/langgraph) + - [Blog](https://docs.gptr.dev/blog/gptr-langgraph) + +## Versioning and Updates + +GPT Researcher is actively maintained and updated. To ensure you're using the latest version: + +- For the PIP package: `pip install --upgrade gpt-researcher` +- For the End-to-End Application: Pull the latest changes from the GitHub repository +- For the Multi-Agent System: Check the documentation for compatibility with the latest LangChain and LangGraph versions + +## Troubleshooting and FAQs + +For common issues and questions, please refer to our [FAQ section](https://docs.gptr.dev/docs/faq) in the documentation. diff --git a/docs/docs/gpt-researcher/getting-started/introduction.md b/docs/docs/gpt-researcher/getting-started/introduction.md new file mode 100644 index 0000000000000000000000000000000000000000..24ecf6505badcb2402e267fa5940d4c827459d9a --- /dev/null +++ b/docs/docs/gpt-researcher/getting-started/introduction.md @@ -0,0 +1,58 @@ +# Introduction + +[![Official Website](https://img.shields.io/badge/Official%20Website-gptr.dev-teal?style=for-the-badge&logo=world&logoColor=white)](https://gptr.dev) +[![Discord Follow](https://dcbadge.vercel.app/api/server/QgZXvJAccX?style=for-the-badge&theme=clean-inverted)](https://discord.gg/QgZXvJAccX) + +[![GitHub Repo stars](https://img.shields.io/github/stars/assafelovic/gpt-researcher?style=social)](https://github.com/assafelovic/gpt-researcher) +[![Twitter Follow](https://img.shields.io/twitter/follow/assaf_elovic?style=social)](https://twitter.com/assaf_elovic) +[![PyPI version](https://badge.fury.io/py/gpt-researcher.svg)](https://badge.fury.io/py/gpt-researcher) +[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/assafelovic/gpt-researcher/blob/master/docs/docs/examples/pip-run.ipynb) + +**[GPT Researcher](https://gptr.dev) is an autonomous agent designed for comprehensive online research on a variety of tasks.** + +The agent can produce detailed, factual and unbiased research reports, with customization options for focusing on relevant resources, outlines, and lessons. Inspired by the recent [Plan-and-Solve](https://arxiv.org/abs/2305.04091) and [RAG](https://arxiv.org/abs/2005.11401) papers, GPT Researcher addresses issues of speed, determinism and reliability, offering a more stable performance and increased speed through parallelized agent work, as opposed to synchronous operations. + +## Why GPT Researcher? + +- To form objective conclusions for manual research tasks can take time, sometimes weeks to find the right resources and information. +- Current LLMs are trained on past and outdated information, with heavy risks of hallucinations, making them almost irrelevant for research tasks. +- Current LLMs are limited to short token outputs which are not sufficient for long detailed research reports (2k+ words). +- Solutions that enable web search (such as ChatGPT + Web Plugin), only consider limited resources and content that in some cases result in superficial conclusions or biased answers. 
+- Using only a selection of resources can create bias in determining the right conclusions for research questions or tasks. + +## Architecture +The main idea is to run "planner" and "execution" agents, where the planner generates questions to research, and the execution agents seek the most related information based on each generated research question. Finally, the planner filters and aggregates all related information and creates a research report.

+The agents leverage both gpt-4o-mini and gpt-4o (128K context) to complete a research task. We optimize for costs by using each model only when necessary. **The average research task takes around 3 minutes to complete and costs ~$0.1.**
+ +
+ + +More specifically: +* Create a domain specific agent based on research query or task. +* Generate a set of research questions that together form an objective opinion on any given task. +* For each research question, trigger a crawler agent that scrapes online resources for information relevant to the given task. +* For each scraped resources, summarize based on relevant information and keep track of its sources. +* Finally, filter and aggregate all summarized sources and generate a final research report. + +## Demo + + +## Tutorials + - [Full Introduction Playlist](https://www.youtube.com/playlist?list=PLUGOUZPIB0F-qv6MvKq3HGr0M_b3U2ATv) + - [How it Works](https://medium.com/better-programming/how-i-built-an-autonomous-ai-agent-for-online-research-93435a97c6c) + - [How to Install](https://www.loom.com/share/04ebffb6ed2a4520a27c3e3addcdde20?sid=da1848e8-b1f1-42d1-93c3-5b0b9c3b24ea) + - [Live Demo](https://www.loom.com/share/6a3385db4e8747a1913dd85a7834846f?sid=a740fd5b-2aa3-457e-8fb7-86976f59f9b8) + - [Homepage](https://gptr.dev) + +## Features +- 📝 Generate research, outlines, resources and lessons reports +- 📜 Can generate long and detailed research reports (over 2K words) +- 🌐 Aggregates over 20 web sources per research to form objective and factual conclusions +- 🖥️ Includes an easy-to-use web interface (HTML/CSS/JS) +- 🔍 Scrapes web sources with javascript support +- 📂 Keeps track and context of visited and used web sources +- 📄 Export research reports to PDF, Word and more... + +Let's get started [here](/docs/gpt-researcher/getting-started/getting-started)! diff --git a/docs/docs/gpt-researcher/getting-started/linux-deployment.md b/docs/docs/gpt-researcher/getting-started/linux-deployment.md new file mode 100644 index 0000000000000000000000000000000000000000..eb0f19f0ccdd50020b89cdde20c94e19600e6961 --- /dev/null +++ b/docs/docs/gpt-researcher/getting-started/linux-deployment.md @@ -0,0 +1,167 @@ +# Running on Linux + +This guide will walk you through the process of deploying GPT Researcher on a Linux server. + +## Server Requirements + +The default Ubuntu droplet option on [DigitalOcean](https://m.do.co/c/1a2af257efba) works well, but this setup should work on any hosting service with similar specifications: + +- 2 GB RAM +- 1 vCPU +- 50 GB SSD Storage + +Here's a screenshot of the recommended Ubuntu machine specifications: + +![Ubuntu Server Specifications](https://github.com/user-attachments/assets/035865c0-d1a2-4990-b7fb-544c229d5198) + +## Deployment Steps + +After setting up your server, follow these steps to install Docker, Docker Compose, and Nginx. + + +Some more commands to achieve that: + +### Step 1: Update the System +### First, ensure your package index is up-to-date: + +```bash +sudo apt update +### Step 2: Install Git +### Git is a version control system. Install it using: + +sudo apt install git -y + +### Verify the installation by checking the Git version: +git --version +### Step 3: Install Docker +### Docker is a platform for developing, shipping, and running applications inside containers. 
+ +### Install prerequisites: + +sudo apt install apt-transport-https ca-certificates curl software-properties-common -y +### Add Docker’s official GPG key: + +curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg +### Set up the stable repository: + +echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null +### Update the package index again and install Docker: + +sudo apt update +sudo apt install docker-ce -y +### Verify Docker installation: + +sudo systemctl status docker +### Optionally, add your user to the docker group to run Docker without sudo: + +sudo usermod -aG docker ${USER} +### Log out and back in for the group change to take effect. + +Step 4: Install Nginx +### Nginx is a high-performance web server. + +### Install Nginx: + +sudo apt install nginx -y +### Start and enable Nginx: + +sudo systemctl start nginx +sudo systemctl enable nginx +### Verify Nginx installation: + +sudo systemctl status nginx +``` + +Here's your nginx config file: + +```bash +events {} + +http { + server { + listen 80; + server_name name.example; + + client_max_body_size 64M; + + location / { + proxy_pass http://localhost:3000; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_cache_bypass $http_upgrade; + } + + location ~ ^/(ws|upload|files|outputs|getConfig|setConfig) { + proxy_pass http://localhost:8000; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "Upgrade"; + proxy_set_header Host $host; + } + } +} +``` + +And if you're using SSL: + +```nginx +server { + server_name name.example; + + client_max_body_size 64M; + + location / { + proxy_pass http://localhost:3000; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_cache_bypass $http_upgrade; + } + + location ~ ^/(ws|upload|files|outputs|getConfig|setConfig) { + proxy_pass http://localhost:8000; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "Upgrade"; + proxy_set_header Host $host; + } + + listen 443 ssl; # managed by Certbot + ssl_certificate /etc/letsencrypt/live/name.example/fullchain.pem; # managed by Certbot + ssl_certificate_key /etc/letsencrypt/live/name.example/privkey.pem; # managed by Certbot + include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot + ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot +} + +server { + if ($host = name.example) { + return 301 https://$host$request_uri; + } # managed by Certbot + + listen 80; + server_name name.example; + return 404; # managed by Certbot +} +``` + +And the relevant commands: + + +```bash +vim /etc/nginx/nginx.conf +### Edit it to reflect above. 
Then verify all is good with: + +sudo nginx -t +# If there are no errors: + +sudo systemctl restart nginx + +# Clone .env.example as .env +# Run from root: + +docker-compose up --build + +``` \ No newline at end of file diff --git a/docs/docs/gpt-researcher/gptr/automated-tests.md b/docs/docs/gpt-researcher/gptr/automated-tests.md new file mode 100644 index 0000000000000000000000000000000000000000..7f6f3d08f14f9bf0c0650a072213111865eff047 --- /dev/null +++ b/docs/docs/gpt-researcher/gptr/automated-tests.md @@ -0,0 +1,43 @@ +# Automated Tests + +## Automated Testing with Github Actions + +This repository contains the code for the automated testing of the GPT-Researcher Repo using Github Actions. + +The tests are triggered in a docker container which runs the tests via the `pytest` module. + +## Running the Tests + +You can run the tests: + +### Via a docker command + +```bash +docker-compose --profile test run --rm gpt-researcher-tests +``` + +### Via a Github Action + +![image](https://github.com/user-attachments/assets/721fca20-01bb-4c10-9cf9-19d823bebbb0) + +Attaching here the required settings & screenshots on the github repo level: + +Step 1: Within the repo, press the "Settings" tab + +Step 2: Create a new environment named "tests" (all lowercase) + +Step 3: Click into the "tests" environment & add environment secrets of ```OPENAI_API_KEY``` & ```TAVILY_API_KEY``` + +Get the keys from here: + +https://app.tavily.com/sign-in + +https://platform.openai.com/api-keys + + +![Screen Shot 2024-07-28 at 9 00 19](https://github.com/user-attachments/assets/7cd341c6-d8d4-461f-ab5e-325abc9fe509) +![Screen Shot 2024-07-28 at 9 02 55](https://github.com/user-attachments/assets/a3744f01-06a6-4c9d-8aa0-1fc742d3e866) + +If configured correctly, here's what the Github action should look like when opening a new PR or committing to an open PR: + +![Screen Shot 2024-07-28 at 8 57 02](https://github.com/user-attachments/assets/30dbc668-4e6a-4b3b-a02e-dc859fc9bd3d) \ No newline at end of file diff --git a/docs/docs/gpt-researcher/gptr/config.md b/docs/docs/gpt-researcher/gptr/config.md new file mode 100644 index 0000000000000000000000000000000000000000..3203cc4123a4a68471b0e83ef27c98c916794329 --- /dev/null +++ b/docs/docs/gpt-researcher/gptr/config.md @@ -0,0 +1,80 @@ +# Configuration + +The config.py enables you to customize GPT Researcher to your specific needs and preferences. + +Thanks to our amazing community and contributions, GPT Researcher supports multiple LLMs and Retrievers. +In addition, GPT Researcher can be tailored to various report formats (such as APA), word count, research iterations depth, etc. + +GPT Researcher defaults to our recommended suite of integrations: [OpenAI](https://platform.openai.com/docs/overview) for LLM calls and [Tavily API](https://app.tavily.com) for retrieving real-time web information. + +As seen below, OpenAI still stands as the superior LLM. We assume it will stay this way for some time, and that prices will only continue to decrease, while performance and speed increase over time. + +
+ +
+ +The default config.py file can be found in `/gpt_researcher/config/`. It supports various options for customizing GPT Researcher to your needs. +You can also include your own external JSON file `config.json` by adding the path in the `config_file` param. **Please follow the config.py file for additional future support**. + +Below is a list of current supported options: + +- **`RETRIEVER`**: Web search engine used for retrieving sources. Defaults to `tavily`. Options: `duckduckgo`, `bing`, `google`, `searchapi`, `serper`, `searx`. [Check here](https://github.com/assafelovic/gpt-researcher/tree/master/gpt_researcher/retrievers) for supported retrievers +- **`EMBEDDING`**: Embedding model. Defaults to `openai:text-embedding-3-small`. Options: `ollama`, `huggingface`, `azure_openai`, `custom`. +- **`FAST_LLM`**: Model name for fast LLM operations such summaries. Defaults to `openai:gpt-4o-mini`. +- **`SMART_LLM`**: Model name for smart operations like generating research reports and reasoning. Defaults to `openai:gpt-4o`. +- **`STRATEGIC_LLM`**: Model name for strategic operations like generating research plans and strategies. Defaults to `openai:o1-preview`. +- **`LANGUAGE`**: Language to be used for the final research report. Defaults to `english`. +- **`CURATE_SOURCES`**: Whether to curate sources for research. This step adds an LLM run which may increase costs and total run time but improves quality of source selection. Defaults to `True`. +- **`FAST_TOKEN_LIMIT`**: Maximum token limit for fast LLM responses. Defaults to `2000`. +- **`SMART_TOKEN_LIMIT`**: Maximum token limit for smart LLM responses. Defaults to `4000`. +- **`STRATEGIC_TOKEN_LIMIT`**: Maximum token limit for strategic LLM responses. Defaults to `4000`. +- **`BROWSE_CHUNK_MAX_LENGTH`**: Maximum length of text chunks to browse in web sources. Defaults to `8192`. +- **`SUMMARY_TOKEN_LIMIT`**: Maximum token limit for generating summaries. Defaults to `700`. +- **`TEMPERATURE`**: Sampling temperature for LLM responses, typically between 0 and 1. A higher value results in more randomness and creativity, while a lower value results in more focused and deterministic responses. Defaults to `0.55`. +- **`TOTAL_WORDS`**: Total word count limit for document generation or processing tasks. Defaults to `800`. +- **`REPORT_FORMAT`**: Preferred format for report generation. Defaults to `APA`. Consider formats like `MLA`, `CMS`, `Harvard style`, `IEEE`, etc. +- **`MAX_ITERATIONS`**: Maximum number of iterations for processes like query expansion or search refinement. Defaults to `3`. +- **`AGENT_ROLE`**: Role of the agent. This might be used to customize the behavior of the agent based on its assigned roles. No default value. +- **`MAX_SUBTOPICS`**: Maximum number of subtopics to generate or consider. Defaults to `3`. +- **`SCRAPER`**: Web scraper to use for gathering information. Defaults to `bs` (BeautifulSoup). You can also use [newspaper](https://github.com/codelucas/newspaper). +- **`DOC_PATH`**: Path to read and research local documents. Defaults to an empty string indicating no path specified. +- **`USER_AGENT`**: Custom User-Agent string for web crawling and web requests. +- **`MEMORY_BACKEND`**: Backend used for memory operations, such as local storage of temporary data. Defaults to `local`. + +To change the default configurations, you can simply add env variables to your `.env` file as named above or export manually in your local project directory. 
+ +For example, to manually change the search engine and report format: +```bash +export RETRIEVER=bing +export REPORT_FORMAT=IEEE +``` +Please note that you might need to export additional env vars and obtain API keys for other supported search retrievers and LLM providers. Please follow your console logs for further assistance. +To learn more about additional LLM support you can check out the docs [here](/docs/gpt-researcher/llms/llms). + +You can also include your own external JSON file `config.json` by adding the path in the `config_file` param. + +## Example: Azure OpenAI Configuration + +If you are using model providers other than OpenAI, you will need additional environment variables beyond the general configuration above. +Check the [langchain documentation](https://python.langchain.com/v0.2/docs/integrations/platforms/) about your model for the exact configuration of the API keys and endpoints. + +Here is an example for [Azure OpenAI](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models) configuration: + +```bash + +OPENAI_API_VERSION="2024-05-01-preview" # or whatever you are using +AZURE_OPENAI_ENDPOINT="https://CHANGEMEN.openai.azure.com/" # change to your Azure OpenAI endpoint +AZURE_OPENAI_API_KEY="[Your Key]" # change to your API key + +EMBEDDING="azure_openai:text-embedding-ada-002" # change to the deployment of your embedding model + +FAST_LLM="azure_openai:gpt-4o-mini" # change to the name of your deployment (not model-name) +FAST_TOKEN_LIMIT=4000 + +SMART_LLM="azure_openai:gpt-4o" # change to the name of your deployment (not model-name) +SMART_TOKEN_LIMIT=4000 + +RETRIEVER="bing" # if you are using Bing as your search engine (which is likely if you use Azure) +BING_API_KEY="[Your Key]" + +``` diff --git a/docs/docs/gpt-researcher/gptr/example.md b/docs/docs/gpt-researcher/gptr/example.md new file mode 100644 index 0000000000000000000000000000000000000000..cc38e3f7d98334f644c0f4d1b89d68f94dc75113 --- /dev/null +++ b/docs/docs/gpt-researcher/gptr/example.md @@ -0,0 +1,32 @@ +# Agent Example + +If you're interested in using GPT Researcher as a standalone agent, you can easily import it into any existing Python project. Below is an example of calling the agent to generate a research report: + +```python +from gpt_researcher import GPTResearcher +import asyncio + +async def fetch_report(query): +    """ +    Fetch a research report based on the provided query and report type. +    """ +    researcher = GPTResearcher(query=query) +    await researcher.conduct_research() +    report = await researcher.write_report() +    return report + +async def generate_research_report(query): +    """ +    This is a sample script that executes an async main function to run a research report. +    """ +    report = await fetch_report(query) +    print(report) + +if __name__ == "__main__": +    QUERY = "What happened in the latest burning man floods?" +    asyncio.run(generate_research_report(query=QUERY)) +``` + +You can further enhance this example to use the returned report as context for generating valuable content such as news articles, marketing content, email templates, newsletters, etc. + +You can also use GPT Researcher to gather information about code documentation, business analysis, financial information and more, all of which can be used to complete much more complex tasks that require factual, high-quality, real-time information.
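+ +For example, one small, hypothetical extension of the script above saves the finished report to disk so it can be reused later as context for other content (the output path is only illustrative): + +```python +from pathlib import Path + +async def save_report(query, path="outputs/report.md"): +    # Reuse fetch_report() from the example above and write the Markdown report to disk +    report = await fetch_report(query) +    output_file = Path(path) +    output_file.parent.mkdir(parents=True, exist_ok=True) +    output_file.write_text(report, encoding="utf-8") +    return output_file +```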
diff --git a/docs/docs/gpt-researcher/gptr/handling-logs-as-they-stream.md b/docs/docs/gpt-researcher/gptr/handling-logs-as-they-stream.md new file mode 100644 index 0000000000000000000000000000000000000000..f013c041d89fe7a58fc8d9dc9231096aff0b6d01 --- /dev/null +++ b/docs/docs/gpt-researcher/gptr/handling-logs-as-they-stream.md @@ -0,0 +1,64 @@ +# Handling Logs + +Here is a snippet of code to help you handle the streaming logs of your Research tasks. + +```python +from typing import Dict, Any +import asyncio +from gpt_researcher import GPTResearcher + +class CustomLogsHandler: + """A custom Logs handler class to handle JSON data.""" + def __init__(self): + self.logs = [] # Initialize logs to store data + + async def send_json(self, data: Dict[str, Any]) -> None: + """Send JSON data and log it.""" + self.logs.append(data) # Append data to logs + print(f"My custom Log: {data}") # For demonstration, print the log + +async def run(): + # Define the necessary parameters with sample values + + query = "What happened in the latest burning man floods?" + report_type = "research_report" # Type of report to generate + report_source = "online" # Could specify source like 'online', 'books', etc. + tone = "informative" # Tone of the report ('informative', 'casual', etc.) + config_path = None # Path to a config file, if needed + + # Initialize researcher with a custom WebSocket + custom_logs_handler = CustomLogsHandler() + + researcher = GPTResearcher( + query=query, + report_type=report_type, + report_source=report_source, + tone=tone, + config_path=config_path, + websocket=custom_logs_handler + ) + + await researcher.conduct_research() # Conduct the research + report = await researcher.write_report() # Write the research report + + return report + +# Run the asynchronous function using asyncio +if __name__ == "__main__": + asyncio.run(run()) +``` + +The data from the research process will be logged and stored in the `CustomLogsHandler` instance. You can customize the logging behavior as needed for your application. + +Here's a sample of the output: + +``` +{ + "type": "logs", + "content": "added_source_url", + "output": "✅ Added source url to research: https://www.npr.org/2023/09/28/1202110410/how-rumors-and-conspiracy-theories-got-in-the-way-of-mauis-fire-recovery\n", + "metadata": "https://www.npr.org/2023/09/28/1202110410/how-rumors-and-conspiracy-theories-got-in-the-way-of-mauis-fire-recovery" +} +``` + +The `metadata` field will include whatever metadata is relevant to the log entry. Let the script above run to completion for the full logs output of a given research task. \ No newline at end of file diff --git a/docs/docs/gpt-researcher/gptr/pip-package.md b/docs/docs/gpt-researcher/gptr/pip-package.md new file mode 100644 index 0000000000000000000000000000000000000000..0e9d60bcb28bd1d1ed80465ee86ee3e06d032369 --- /dev/null +++ b/docs/docs/gpt-researcher/gptr/pip-package.md @@ -0,0 +1,272 @@ +# PIP Package +[![PyPI version](https://badge.fury.io/py/gpt-researcher.svg)](https://badge.fury.io/py/gpt-researcher) +[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/assafelovic/gpt-researcher/blob/master/docs/docs/examples/pip-run.ipynb) + +🌟 **Exciting News!** Now, you can integrate `gpt-researcher` with your apps seamlessly! + +## Steps to Install GPT Researcher + +Follow these easy steps to get started: + +0. **Pre-requisite**: Ensure Python 3.10+ is installed on your machine 💻 +1. 
**Install gpt-researcher**: Grab the official package from [PyPi](https://pypi.org/project/gpt-researcher/). + +```bash +pip install gpt-researcher +``` + +2. **Environment Variables:** Create a .env file with your OpenAI API key or simply export it + +```bash +export OPENAI_API_KEY={Your OpenAI API Key here} +``` + +```bash +export TAVILY_API_KEY={Your Tavily API Key here} +``` + +3. **Start using GPT Researcher in your own codebase** + +## Example Usage + +```python +from gpt_researcher import GPTResearcher +import asyncio + +async def get_report(query: str, report_type: str): + researcher = GPTResearcher(query, report_type) + research_result = await researcher.conduct_research() + report = await researcher.write_report() + + # Get additional information + research_context = researcher.get_research_context() + research_costs = researcher.get_costs() + research_images = researcher.get_research_images() + research_sources = researcher.get_research_sources() + + return report, research_context, research_costs, research_images, research_sources + +if __name__ == "__main__": + query = "what team may win the NBA finals?" + report_type = "research_report" + + report, context, costs, images, sources = asyncio.run(get_report(query, report_type)) + + print("Report:") + print(report) + print("\nResearch Costs:") + print(costs) + print("\nNumber of Research Images:") + print(len(images)) + print("\nNumber of Research Sources:") + print(len(sources)) +``` + +## Specific Examples + +### Example 1: Research Report + +```python +query = "Latest developments in renewable energy technologies" +report_type = "research_report" +``` + +### Example 2: Resource Report + +```python +query = "List of top AI conferences in 2023" +report_type = "resource_report" +``` + +### Example 3: Outline Report + +```python +query = "Outline for an article on the impact of AI in education" +report_type = "outline_report" +``` + +## Integration with Web Frameworks + +### FastAPI Example + +```python +from fastapi import FastAPI +from gpt_researcher import GPTResearcher +import asyncio + +app = FastAPI() + +@app.get("/report/{report_type}") +async def get_report(query: str, report_type: str) -> dict: + researcher = GPTResearcher(query, report_type) + research_result = await researcher.conduct_research() + report = await researcher.write_report() + + source_urls = researcher.get_source_urls() + research_costs = researcher.get_costs() + research_images = researcher.get_research_images() + research_sources = researcher.get_research_sources() + + return { + "report": report, + "source_urls": source_urls, + "research_costs": research_costs, + "num_images": len(research_images), + "num_sources": len(research_sources) + } + +# Run the server +# uvicorn main:app --reload +``` + +### Flask Example + +**Pre-requisite**: Install flask with the async extra. 
+ +```bash +pip install 'flask[async]' +``` + +```python +from flask import Flask, request, jsonify +from gpt_researcher import GPTResearcher + +app = Flask(__name__) + +@app.route('/report/<report_type>', methods=['GET']) +async def get_report(report_type): +    query = request.args.get('query') +    researcher = GPTResearcher(query, report_type) +    research_result = await researcher.conduct_research() +    report = await researcher.write_report() + +    source_urls = researcher.get_source_urls() +    research_costs = researcher.get_costs() +    research_images = researcher.get_research_images() +    research_sources = researcher.get_research_sources() + +    return jsonify({ +        "report": report, +        "source_urls": source_urls, +        "research_costs": research_costs, +        "num_images": len(research_images), +        "num_sources": len(research_sources) +    }) + +# Run the server +# flask run +``` + +**Run the server** + +```bash +flask run +``` + +**Example Request** + +```bash +curl -X GET "http://localhost:5000/report/research_report?query=what team may win the nba finals?" +``` + +## Getters and Setters +GPT Researcher provides several methods to retrieve additional information about the research process: + +### Get Source URLs +Sources are the URLs that were used to gather information for the research. +```python +source_urls = researcher.get_source_urls() +``` + +### Get Research Context +Context is all the retrieved information from the research. It includes the sources and their corresponding content. +```python +research_context = researcher.get_research_context() +``` + +### Get Research Costs +Costs are the number of tokens consumed during the research process. +```python +research_costs = researcher.get_costs() +``` + +### Get Research Images +Retrieves a list of images found during the research process. +```python +research_images = researcher.get_research_images() +``` + +### Get Research Sources +Retrieves a list of research sources, including title, content, and images. +```python +research_sources = researcher.get_research_sources() +``` + +### Set Verbose +You can set the verbose mode to get more detailed logs. +```python +researcher.set_verbose(True) +``` + +### Add Costs +You can also add costs to the research process if you want to track the costs from external usage.
+```python +researcher.add_costs(0.22) +``` + +## Advanced Usage + +### Customizing the Research Process + +You can customize various aspects of the research process by passing additional parameters when initializing the GPTResearcher: + +```python +researcher = GPTResearcher( + query="Your research query", + report_type="research_report", + report_format="APA", + tone="formal and objective", + max_subtopics=5, + verbose=True +) +``` + +### Handling Research Results + +After conducting research, you can process the results in various ways: + +```python +# Conduct research +research_result = await researcher.conduct_research() + +# Generate a report +report = await researcher.write_report() + +# Generate a conclusion +conclusion = await researcher.write_report_conclusion(report) + +# Get subtopics +subtopics = await researcher.get_subtopics() + +# Get draft section titles for a subtopic +draft_titles = await researcher.get_draft_section_titles("Subtopic name") +``` + +### Working with Research Context + +You can use the research context for further processing or analysis: + +```python +# Get the full research context +context = researcher.get_research_context() + +# Get similar written contents based on draft section titles +similar_contents = await researcher.get_similar_written_contents_by_draft_section_titles( + current_subtopic="Subtopic name", + draft_section_titles=["Title 1", "Title 2"], + written_contents=some_written_contents, + max_results=10 +) +``` + +This comprehensive documentation should help users understand and utilize the full capabilities of the GPT Researcher package. diff --git a/docs/docs/gpt-researcher/gptr/querying-the-backend.md b/docs/docs/gpt-researcher/gptr/querying-the-backend.md new file mode 100644 index 0000000000000000000000000000000000000000..993289a760960a7e391acf9a7f6c1da37b868305 --- /dev/null +++ b/docs/docs/gpt-researcher/gptr/querying-the-backend.md @@ -0,0 +1,106 @@ +# Querying the Backend + +## Introduction + +In this section, we will discuss how to query the GPTR backend server. The GPTR backend server is a Python server that runs the GPTR Python package. The server listens for WebSocket connections and processes incoming messages to generate reports, streaming back logs and results to the client. + +An example WebSocket client is implemented in the `gptr-webhook.js` file below. + +This function sends a Webhook Message to the GPTR Python backend running on localhost:8000, but this example can also be modified to query a [GPTR Server hosted on Linux](https://docs.gptr.dev/docs/gpt-researcher/getting-started/linux-deployment). 
+ +// gptr-webhook.js + +```javascript + +const WebSocket = require('ws'); + +let socket = null; +let responseCallback = null; + +async function initializeWebSocket() { + if (!socket) { + const host = 'localhost:8000'; + const ws_uri = `ws://${host}/ws`; + + socket = new WebSocket(ws_uri); + + socket.onopen = () => { + console.log('WebSocket connection established'); + }; + + socket.onmessage = (event) => { + const data = JSON.parse(event.data); + console.log('WebSocket data received:', data); + + if (data.content === 'dev_team_result' + && data.output.rubber_ducker_thoughts != undefined + && data.output.tech_lead_review != undefined) { + if (responseCallback) { + responseCallback(data.output); + responseCallback = null; // Clear callback after use + } + } else { + console.log('Received data:', data); + } + }; + + socket.onclose = () => { + console.log('WebSocket connection closed'); + socket = null; + }; + + socket.onerror = (error) => { + console.error('WebSocket error:', error); + }; + } +} + +async function sendWebhookMessage(message) { + return new Promise((resolve, reject) => { + if (!socket || socket.readyState !== WebSocket.OPEN) { + initializeWebSocket(); + } + + const data = { + task: message, + report_type: 'dev_team', + report_source: 'web', + tone: 'Objective', + headers: {}, + repo_name: 'elishakay/gpt-researcher' + }; + + const payload = "start " + JSON.stringify(data); + + responseCallback = (response) => { + resolve(response); // Resolve the promise with the WebSocket response + }; + + if (socket.readyState === WebSocket.OPEN) { + socket.send(payload); + console.log('Message sent:', payload); + } else { + socket.onopen = () => { + socket.send(payload); + console.log('Message sent after connection:', payload); + }; + } + }); +} + +module.exports = { + sendWebhookMessage +}; +``` + +And here's how you can leverage this helper function: + +```javascript +const { sendWebhookMessage } = require('./gptr-webhook'); + +async function main() { + const message = 'How do I get started with GPT-Researcher Websockets?'; + const response = await sendWebhookMessage(message); + console.log('Response:', response); +} +``` \ No newline at end of file diff --git a/docs/docs/gpt-researcher/gptr/scraping.md b/docs/docs/gpt-researcher/gptr/scraping.md new file mode 100644 index 0000000000000000000000000000000000000000..cca9e4b3beae6c6b2e62077bfb571c5c506987dc --- /dev/null +++ b/docs/docs/gpt-researcher/gptr/scraping.md @@ -0,0 +1,133 @@ +# Scraping Options + +GPT Researcher now offers various methods for web scraping: static scraping with BeautifulSoup, dynamic scraping with Selenium, and High scale scraping with Tavily Extract. This document explains how to switch between these methods and the benefits of each approach. + +## Configuring Scraping Method + +You can choose your preferred scraping method by setting the `SCRAPER` environment variable: + +1. For BeautifulSoup (static scraping): + ``` + export SCRAPER="bs" + ``` + +2. For Selenium (dynamic browser scraping): + ``` + export SCRAPER="browser" + ``` + +3. For **production** use cases, you can set the Scraper to `tavily_extract`. [Tavily](https://tavily.com) allows you to scrape sites at scale without the hassle of setting up proxies, managing cookies, or dealing with CAPTCHAs. Please note that you need to have a Tavily account and [API key](https://app.tavily.com) to use this option. To learn more about Tavily Extract [see here](https://docs.tavily.com/docs/python-sdk/tavily-extract/getting-started). 
+ Make sure to first install the pip package `tavily-python`. Then: + ``` + export SCRAPER="tavily_extract" + ``` + +Note: If not set, GPT Researcher will default to BeautifulSoup for scraping. + +## Scraping Methods Explained + +### BeautifulSoup (Static Scraping) + +When `SCRAPER="bs"`, GPT Researcher uses BeautifulSoup for static scraping. This method: + +- Sends a single HTTP request to fetch the page content +- Parses the static HTML content +- Extracts text and data from the parsed HTML + +Benefits: +- Faster and more lightweight +- Doesn't require additional setup +- Works well for simple, static websites + +Limitations: +- Cannot handle dynamic content loaded by JavaScript +- May miss content that requires user interaction to display + +### Selenium (Browser Scraping) + +When `SCRAPER="browser"`, GPT Researcher uses Selenium for dynamic scraping. This method: + +- Opens a real browser instance (Chrome by default) +- Loads the page and executes JavaScript +- Waits for dynamic content to load +- Extracts text and data from the fully rendered page + +Benefits: +- Can scrape dynamically loaded content +- Simulates real user interactions (scrolling, clicking, etc.) +- Works well for complex, JavaScript-heavy websites + +Limitations: +- Slower than static scraping +- Requires more system resources +- Requires additional setup (Selenium and WebDriver installation) + +### Tavily Extract (Recommended for Production) + +When `SCRAPER="tavily_extract"`, GPT Researcher uses Tavily's Extract API for web scraping. This method: + +- Uses Tavily's robust infrastructure to handle web scraping at scale +- Automatically handles CAPTCHAs, JavaScript rendering, and anti-bot measures +- Provides clean, structured content extraction + +Benefits: +- Production-ready and highly reliable +- No need to manage proxies or handle rate limiting +- Excellent success rate on most websites +- Handles both static and dynamic content +- Built-in content cleaning and formatting +- Fast response times through Tavily's distributed infrastructure + +Setup: +1. Create a Tavily account at [app.tavily.com](https://app.tavily.com) +2. Get your API key from the dashboard +3. Install the Tavily Python SDK: + ```bash + pip install tavily-python + ``` +4. Set your Tavily API key: + ```bash + export TAVILY_API_KEY="your-api-key" + ``` + +Usage Considerations: +- Requires a Tavily API key and account +- API calls are metered based on your Tavily plan +- Best for production environments where reliability is crucial +- Ideal for businesses and applications that need consistent scraping results + +## Additional Setup for Selenium + +If you choose to use Selenium (SCRAPER="browser"), you'll need to: + +1. Install the Selenium package: + ``` + pip install selenium + ``` + +2. Download the appropriate WebDriver for your browser: + - For Chrome: [ChromeDriver](https://sites.google.com/a/chromium.org/chromedriver/downloads) + - For Firefox: [GeckoDriver](https://github.com/mozilla/geckodriver/releases) + - For Safari: Built-in, no download required + + Ensure the WebDriver is in your system's PATH. 
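+ +To confirm the browser setup works before running a full research task, you can open a page with Selenium directly; this is just a standalone sanity check, separate from GPT Researcher itself: + +```python +from selenium import webdriver + +# Launches Chrome using the WebDriver found on your PATH +driver = webdriver.Chrome() +driver.get("https://example.com") +print(driver.title)  # Expected output: "Example Domain" +driver.quit() +```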
+ +## Choosing the Right Method + +- Use BeautifulSoup (static) for: + - Simple websites with mostly static content + - Scenarios where speed is a priority + - When you don't need to interact with the page + +- Use Selenium (dynamic) for: + - Websites with content loaded via JavaScript + - Sites that require scrolling or clicking to load more content + - When you need to simulate user interactions + +## Troubleshooting + +- If Selenium fails to start, ensure you have the correct WebDriver installed and it's in your system's PATH. +- If you encounter an `ImportError` related to Selenium, make sure you've installed the Selenium package. +- If the scraper misses expected content, try switching between static and dynamic scraping to see which works better for your target website. + +Remember, the choice between static and dynamic scraping can significantly impact the quality and completeness of the data GPT Researcher can gather. Choose the method that best suits your research needs and the websites you're targeting. \ No newline at end of file diff --git a/docs/docs/gpt-researcher/gptr/troubleshooting.md b/docs/docs/gpt-researcher/gptr/troubleshooting.md new file mode 100644 index 0000000000000000000000000000000000000000..4eb444adb505b0f5289b51a963f557a1c21bd3a3 --- /dev/null +++ b/docs/docs/gpt-researcher/gptr/troubleshooting.md @@ -0,0 +1,56 @@ +# Troubleshooting + +We're constantly working to provide a more stable version. If you're running into any issues, please first check out the resolved issues or ask us via our [Discord community](https://discord.gg/QgZXvJAccX). + +### model: gpt-4 does not exist +This relates to not having permission to use gpt-4 yet. Based on OpenAI, it will be [widely available for all by end of July](https://help.openai.com/en/articles/7102672-how-can-i-access-gpt-4). + +### cannot load library 'gobject-2.0-0' + +The issue relates to the library WeasyPrint (which is used to generate PDFs from the research report). Please follow this guide to resolve it: https://doc.courtbouillon.org/weasyprint/stable/first_steps.html + +Or you can install this package manually + +In case of MacOS you can install this lib using +`brew install glib pango` +If you face an issue with linking afterward, you can try running `brew link glib` + +In case of Linux you can install this lib using +`sudo apt install libglib2.0-dev` + +### cannot load library 'pango' + +In case of MacOS you can install this lib using +`brew install pango` + +In case of Linux you can install this lib using +`sudo apt install libpango-1.0-0` + +**Workaround for Mac M chip users** + +If the above solutions don't work, you can try the following: +- Install a fresh version of Python 3.11 pointed to brew: +`brew install python@3.11` +- Install the required libraries: +`brew install pango glib gobject-introspection` +- Install the required GPT Researcher Python packages: +`pip3.11 install -r requirements.txt` +- Run the app with Python 3.11 (using brew): +`python3.11 -m uvicorn main:app --reload` + +### Error processing the url + +We're using [Selenium](https://www.selenium.dev) for site scraping. Some sites fail to be scraped. In these cases, restart and try running again. + + +### Chrome version issues + +Many users have an issue with their chromedriver because the latest chrome browser version doesn't have a compatible chrome driver yet. + +To downgrade your Chrome web browser using [slimjet](https://www.slimjet.com/chrome/google-chrome-old-version.php), follow these steps. 
First, visit the website and scroll down to find the list of available older Chrome versions. Choose the version you wish to install, +making sure it's compatible with your operating system. +Once you've selected the desired version, click the corresponding link to download the installer. Before proceeding with the installation, it's crucial to uninstall your current version of Chrome to avoid conflicts. + +It's important to check that the version you downgrade to has a compatible chromedriver available on the official [ChromeDriver website](https://chromedriver.chromium.org/downloads). + +**If none of the above work, you can [try out our hosted beta](https://app.tavily.com)** \ No newline at end of file diff --git a/docs/docs/gpt-researcher/llms/llms.md b/docs/docs/gpt-researcher/llms/llms.md new file mode 100644 index 0000000000000000000000000000000000000000..c32451f8535f4c9f3c4f76e2721097829096694a --- /dev/null +++ b/docs/docs/gpt-researcher/llms/llms.md @@ -0,0 +1,285 @@ +# Configure LLM + +As described in the [introduction](/docs/gpt-researcher/gptr/config), the default LLM and embedding provider is OpenAI, due to its superior performance and speed. +That said, GPT Researcher supports various open- and closed-source LLMs and embeddings, and you can easily switch between them by updating the `SMART_LLM`, `FAST_LLM` and `EMBEDDING` env variables. You might also need to include the provider API key and corresponding configuration params. + +Currently supported LLMs are `openai`, `anthropic`, `azure_openai`, `cohere`, `google_vertexai`, `google_genai`, `fireworks`, `ollama`, `together`, `mistralai`, `huggingface`, `groq`, `bedrock` and `litellm`. + +Currently supported embeddings are `openai`, `azure_openai`, `cohere`, `google_vertexai`, `google_genai`, `fireworks`, `ollama`, `together`, `mistralai`, `huggingface`, `nomic`, `voyageai` and `bedrock`. + +To learn more about supported customization options, see [here](/gpt-researcher/config). + +**Please note**: GPT Researcher is optimized and heavily tested on GPT models. Other models may run into context-limit errors or return unexpected responses. +Please share any feedback in our [Discord community](https://discord.gg/DUmbTebB) channel so we can keep improving the experience and performance. + +Below are examples of how to configure the various supported LLMs. + +## OpenAI + +```bash +# set the custom OpenAI API key +OPENAI_API_KEY=[Your Key] + +# specify llms +FAST_LLM="openai:gpt-4o-mini" +SMART_LLM="openai:gpt-4o" +STRATEGIC_LLM="openai:o1-preview" + +# specify embedding +EMBEDDING="openai:text-embedding-3-small" +``` + + +## Custom LLM + +Create a local OpenAI-compatible API using the [llama.cpp Server](https://github.com/ggerganov/llama.cpp/blob/master/examples/server/README.md#quick-start).
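
Before wiring a local server into GPT Researcher, it can help to confirm that it actually answers OpenAI-compatible requests. The snippet below is a minimal sanity check, not part of GPT Researcher itself; it assumes the llama.cpp server from the quick start is listening on `http://localhost:1234/v1`, that the `openai` Python package is installed, and that `your_local_model` is replaced with whatever model name your server exposes.

```python
# Quick check that a local OpenAI-compatible endpoint responds to chat completions.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:1234/v1", api_key="dummy_key")

response = client.chat.completions.create(
    model="your_local_model",  # placeholder: use the model your server serves
    messages=[{"role": "user", "content": "Reply with the single word: pong"}],
)
print(response.choices[0].message.content)
```

If this prints a response, the same base URL and key can be used in the environment variables below.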
+ +For custom LLM, specify "openai:{your-llm}" +```bash +# set the custom OpenAI API url +OPENAI_BASE_URL="http://localhost:1234/v1" +# set the custom OpenAI API key +OPENAI_API_KEY="dummy_key" + +# specify custom llms +FAST_LLM="openai:your_fast_llm" +SMART_LLM="openai:your_smart_llm" +STRATEGIC_LLM="openai:your_strategic_llm" +``` + +For custom embedding, set "custom:{your-embedding}" +```bash +# set the custom OpenAI API url +OPENAI_BASE_URL="http://localhost:1234/v1" +# set the custom OpenAI API key +OPENAI_API_KEY="dummy_key" + +# specify the custom embedding model +EMBEDDING="custom:your_embedding" +``` + + +## Azure OpenAI + +See also the documentation in the Langchain [Azure OpenAI](https://api.python.langchain.com/en/latest/chat_models/langchain_openai.chat_models.azure.AzureChatOpenAI.html) page. + +On Azure OpenAI you will need to create deployments for each model you want to use. Please also specify the model names/deployment names in your `.env` file: + +Required Embedding Model: +To ensure optimal performance, GPT Researcher requires the text-embedding-3-large model. Please deploy this specific model to your Azure Endpoint. + +```bash +AZURE_OPENAI_API_KEY=[Your Key] +AZURE_OPENAI_ENDPOINT=https://{your-endpoint}.openai.azure.com/ +OPENAI_API_VERSION=2024-05-01-preview + +# note that the deployment name must be the same as the model name +FAST_LLM=azure_openai:gpt-4o-mini +SMART_LLM=azure_openai:gpt-4o +STRATEGIC_LLM=azure_openai:o1-preview + + +``` + + +## Ollama + +GPT Researcher supports both Ollama LLMs and embeddings. You can choose each or both. +To use [Ollama](http://www.ollama.com) you can set the following environment variables + +```bash +OLLAMA_BASE_URL=http://localhost:11434 +FAST_LLM="ollama:llama3" +SMART_LLM="ollama:llama3" +STRATEGIC_LLM="ollama:llama3" + +EMBEDDING="ollama:nomic-embed-text" +``` + +## Groq + +GroqCloud provides advanced AI hardware and software solutions designed to deliver amazingly fast AI inference performance. +To leverage Groq in GPT-Researcher, you will need a GroqCloud account and an API Key. (__NOTE:__ Groq has a very _generous free tier_.) + +### Sign up +- You can signup here: [https://console.groq.com/login](https://console.groq.com/login) +- Once you are logged in, you can get an API Key here: [https://console.groq.com/keys](https://console.groq.com/keys) + +- Once you have an API key, you will need to add it to your `systems environment` using the variable name: +`GROQ_API_KEY="*********************"` + +### Update env vars +And finally, you will need to configure the GPT-Researcher Provider and Model variables: + +```bash +GROQ_API_KEY=[Your Key] + +# Set one of the LLM models supported by Groq +FAST_LLM="groq:Mixtral-8x7b-32768" +SMART_LLM="groq:Mixtral-8x7b-32768" +STRATEGIC_LLM="groq:Mixtral-8x7b-32768" +``` + +__NOTE:__ As of the writing of this Doc (May 2024), the available Language Models from Groq are: + +* Llama3-70b-8192 +* Llama3-8b-8192 +* Mixtral-8x7b-32768 +* Gemma-7b-it + + +## Anthropic + +Refer to Anthropic [Getting started page](https://docs.anthropic.com/en/api/getting-started) to obtain Anthropic API key. Update the corresponding env vars, for example: +```bash +ANTHROPIC_API_KEY=[Your key] +FAST_LLM="anthropic:claude-2.1" +SMART_LLM="anthropic:claude-3-opus-20240229" +STRATEGIC_LLM="anthropic:claude-3-opus-20240229" +``` + +Anthropic does not offer its own embedding model. + + +## Mistral AI + +Sign up for a [Mistral API key](https://console.mistral.ai/users/api-keys/). 
+Then update the corresponding env vars, for example: +```bash +MISTRAL_API_KEY=[Your key] +FAST_LLM="mistralai:open-mistral-7b" +SMART_LLM="mistralai:mistral-large-latest" +STRATEGIC_LLM="mistralai:mistral-large-latest" + +EMBEDDING="mistralai:mistral-embed" +``` + + +## Together AI +[Together AI](https://www.together.ai/) offers an API to query [50+ leading open-source models](https://docs.together.ai/docs/inference-models) in a couple lines of code. +Then update corresponding env vars, for example: +```bash +TOGETHER_API_KEY=[Your key] +FAST_LLM="together:meta-llama/Llama-3-8b-chat-hf" +SMART_LLM="together:meta-llama/Llama-3-70b-chat-hf" +STRATEGIC_LLM="together:meta-llama/Llama-3-70b-chat-hf" + +EMBEDDING="mistralai:nomic-ai/nomic-embed-text-v1.5" +``` + + +## HuggingFace + +This integration requires a bit of extra work. Follow [this guide](https://python.langchain.com/v0.1/docs/integrations/chat/huggingface/) to learn more. +After you've followed the tutorial above, update the env vars: +```bash +HUGGINGFACE_API_KEY=[Your key] +FAST_LLM="huggingface:HuggingFaceH4/zephyr-7b-beta" +SMART_LLM="huggingface:HuggingFaceH4/zephyr-7b-beta" +STRATEGIC_LLM="huggingface:HuggingFaceH4/zephyr-7b-beta" + +EMBEDDING="sentence-transformers/all-MiniLM-L6-v2" +``` + + +## Google Gemini + +Sign up [here](https://ai.google.dev/gemini-api/docs/api-key) for obtaining a Google Gemini API Key and update the following env vars: +```bash +GOOGLE_API_KEY=[Your key] +FAST_LLM="google_genai:gemini-1.5-flash" +SMART_LLM="google_genai:gemini-1.5-pro" +STRATEGIC_LLM="google_genai:gemini-1.5-pro" + +EMBEDDING="google_genai:models/text-embedding-004" +``` + + +## Google VertexAI + +```bash +FAST_LLM="google_vertexai:gemini-1.5-flash-001" +SMART_LLM="google_vertexai:gemini-1.5-pro-001" +STRATEGIC_LLM="google_vertexai:gemini-1.5-pro-001" + +EMBEDDING="google_vertexai:text-embedding-004" +``` + + +## Cohere + +```bash +COHERE_API_KEY=[Your key] +FAST_LLM="cohere:command" +SMART_LLM="cohere:command-nightly" +STRATEGIC_LLM="cohere:command-nightly" + +EMBEDDING="cohere:embed-english-v3.0" +``` + + +## Fireworks + +```bash +FIREWORKS_API_KEY=[Your key] +base_url="https://api.fireworks.ai/inference/v1/completions" +FAST_LLM="fireworks:accounts/fireworks/models/mixtral-8x7b-instruct" +SMART_LLM="fireworks:accounts/fireworks/models/mixtral-8x7b-instruct" +STRATEGIC_LLM="fireworks:accounts/fireworks/models/mixtral-8x7b-instruct" + +EMBEDDING="fireworks:nomic-ai/nomic-embed-text-v1.5" +``` + + +## Bedrock + +```bash +FAST_LLM="bedrock:anthropic.claude-3-sonnet-20240229-v1:0" +SMART_LLM="bedrock:anthropic.claude-3-sonnet-20240229-v1:0" +STRATEGIC_LLM="bedrock:anthropic.claude-3-sonnet-20240229-v1:0" + +EMBEDDING="bedrock:amazon.titan-embed-text-v2:0" +``` + + +## LiteLLM + +```bash +FAST_LLM="litellm:perplexity/pplx-7b-chat" +SMART_LLM="litellm:perplexity/pplx-70b-chat" +STRATEGIC_LLM="litellm:perplexity/pplx-70b-chat" +``` + + +## xAI + +```bash +FAST_LLM="xai:grok-beta" +SMART_LLM="xai:grok-beta" +STRATEGIC_LLM="xai:grok-beta" +``` + + +## DeepSeek +```bash +DEEPSEEK_API_KEY=[Your key] +FAST_LLM="deepseek:deepseek-chat" +SMART_LLM="deepseek:deepseek-chat" +STRATEGIC_LLM="deepseek:deepseek-chat" +``` + + +## Other Embedding Models + +### Nomic + +```bash +EMBEDDING="nomic:nomic-embed-text-v1.5" +``` + +### VoyageAI + +```bash +VOYAGE_API_KEY=[Your Key] +EMBEDDING="voyageai:voyage-law-2" +``` diff --git a/docs/docs/gpt-researcher/llms/running-with-ollama.md b/docs/docs/gpt-researcher/llms/running-with-ollama.md new file mode 
100644 index 0000000000000000000000000000000000000000..6ecea528f5fdee03ee0709b18aebb66faa0e9aef --- /dev/null +++ b/docs/docs/gpt-researcher/llms/running-with-ollama.md @@ -0,0 +1,112 @@ +# Running with Ollama + +Ollama is a platform that allows you to deploy and manage custom language models. This guide will walk you through deploying a custom language model on Ollama. + +Read on to understand how to install a Custom LLM with the Ollama WebUI, and how to query it with GPT-Researcher. + + +## Fetching the Desired LLM Models + +After deploying Ollama WebUI, you'll want to enter the [Open WebUI Admin App](https://github.com/open-webui/open-webui/tree/main) & download a custom LLM. + +Choose a model from [Ollama's Library of LLM's](https://ollama.com/library?sort=popular) + +Paste the model name & size into the Web UI: + +Screen Shot 2024-08-27 at 23 26 28 + +For our example, let's choose to download the `qwen2:1.5b` model. + +This model now automatically becomes available via your Server's out-of-the-box API - we'll leverage it within our GPT-Researcher .env file in the next step. + + +## Querying your Custom LLM with GPT-Researcher + +If you deploy ollama locally, a .env like so, should enable powering GPT-Researcher with Ollama: + +```bash +OPENAI_API_KEY="123" +OPENAI_API_BASE="http://127.0.0.1:11434/v1" +OLLAMA_BASE_URL="http://127.0.0.1:11434/" +FAST_LLM="ollama:qwen2:1.5b" +SMART_LLM="ollama:qwen2:1.5b" +EMBEDDING="ollama:all-minilm:22m" +``` + +Replace `FAST_LLM` & `SMART_LLM` with the model you downloaded from the Elestio Web UI in the previous step. + + +## Run LLM Test Script for GPTR + +And here's a custom python script you can use to query your custom LLM: + +```python + +import os +import asyncio +import logging +logging.basicConfig(level=logging.DEBUG) +from gpt_researcher.llm_provider.generic import GenericLLMProvider +from gpt_researcher.utils.llm import get_llm + +OLLAMA_BASE_URL = "https://ollama-ug3qr-u21899.vm.elestio.app:57987" +LLM_MODEL = "llama3.1" + +# Create the GenericLLMProvider instance +llm_provider = get_llm( + "ollama", + base_url=OLLAMA_BASE_URL, + model=LLM_MODEL, + temperature=0.7, + max_tokens=2000, + verify_ssl=False # Add this line +) + +# Test the connection with a simple query +messages = [{"role": "user", "content": "sup?"}] + +async def test_ollama(): + try: + response = await llm_provider.get_chat_response(messages, stream=False) + print("Ollama response:", response) + except Exception as e: + print(f"Error: {e}") + +# Run the async function +asyncio.run(test_ollama()) + +``` + +Replace `OLLAMA_BASE_URL` with the URL of your Ollama instance, and `LLM_MODEL` with the model you downloaded from the Ollama Web UI. + +Run the script to test the connection with your custom LLM. + + +## Deploy Ollama on Elestio + +Elestio is a platform that allows you to deploy and manage custom language models. This guide will walk you through deploying a custom language model on Elestio. 
+ +You can deploy an [Open WebUI](https://github.com/open-webui/open-webui/tree/main) server with [Elestio](https://elest.io/open-source/ollama) + +Here's an example .env file that will enable powering GPT-Researcher with Elestio: + +```bash +OPENAI_API_KEY="123" +OPENAI_API_BASE="https://.vm.elestio.app:57987/v1" +OLLAMA_BASE_URL="https://.vm.elestio.app:57987/" +FAST_LLM="openai:qwen2:1.5b" +SMART_LLM="openai:qwen2:1.5b" +EMBEDDING="ollama:all-minilm:22m" +``` + +#### Disable Elestio Authentication or Add Auth Headers + +To remove the basic auth you have to follow the below steps: +Go to your service -> Security -> at last Nginx -> in that find the below code: + +```bash +auth_basic "Authentication"; +auth_basic_user_file /etc/nginx/conf.d/.htpasswd; +``` + +Comment these both these lines out and click the button "Update & Restart" to reflect the changes. diff --git a/docs/docs/gpt-researcher/llms/testing-your-llm.md b/docs/docs/gpt-researcher/llms/testing-your-llm.md new file mode 100644 index 0000000000000000000000000000000000000000..ae6e67046fbdcfc0ab6d85c2e6bd2543c7e542e8 --- /dev/null +++ b/docs/docs/gpt-researcher/llms/testing-your-llm.md @@ -0,0 +1,30 @@ +# Testing your LLM + +Here is a snippet of code to help you verify that your LLM-related environment variables are set up correctly. + +```python +from gpt_researcher.config.config import Config +from gpt_researcher.utils.llm import create_chat_completion +import asyncio +from dotenv import load_dotenv +load_dotenv() + +async def main(): + cfg = Config() + + try: + report = await create_chat_completion( + model=cfg.smart_llm_model, + messages = [{"role": "user", "content": "sup?"}], + temperature=0.35, + llm_provider=cfg.smart_llm_provider, + stream=True, + max_tokens=cfg.smart_token_limit, + llm_kwargs=cfg.llm_kwargs + ) + except Exception as e: + print(f"Error in calling LLM: {e}") + +# Run the async function +asyncio.run(main()) +``` \ No newline at end of file diff --git a/docs/docs/gpt-researcher/multi_agents/langgraph.md b/docs/docs/gpt-researcher/multi_agents/langgraph.md new file mode 100644 index 0000000000000000000000000000000000000000..b01badc154c11f55020211312ece0da776917b2f --- /dev/null +++ b/docs/docs/gpt-researcher/multi_agents/langgraph.md @@ -0,0 +1,148 @@ +# LangGraph + +[LangGraph](https://python.langchain.com/docs/langgraph) is a library for building stateful, multi-actor applications with LLMs. +This example uses Langgraph to automate the process of an in depth research on any given topic. + +## Use case +By using Langgraph, the research process can be significantly improved in depth and quality by leveraging multiple agents with specialized skills. +Inspired by the recent [STORM](https://arxiv.org/abs/2402.14207) paper, this example showcases how a team of AI agents can work together to conduct research on a given topic, from planning to publication. + +An average run generates a 5-6 page research report in multiple formats such as PDF, Docx and Markdown. + +Please note: This example uses the OpenAI API only for optimized performance. + +## The Multi Agent Team +The research team is made up of 7 AI agents: +- **Human** - The human in the loop that oversees the process and provides feedback to the agents. +- **Chief Editor** - Oversees the research process and manages the team. This is the "master" agent that coordinates the other agents using Langgraph. +- **Researcher** (gpt-researcher) - A specialized autonomous agent that conducts in depth research on a given topic. 
+- **Editor** - Responsible for planning the research outline and structure. +- **Reviewer** - Validates the correctness of the research results given a set of criteria. +- **Revisor** - Revises the research results based on the feedback from the reviewer. +- **Writer** - Responsible for compiling and writing the final report. +- **Publisher** - Responsible for publishing the final report in various formats. + +## How it works +Generally, the process is based on the following stages: +1. Planning stage +2. Data collection and analysis +3. Review and revision +4. Writing and submission +5. Publication + +### Architecture +
+*Architecture diagram*
+ +### Steps +More specifically (as seen in the architecture diagram) the process is as follows: +- Browser (gpt-researcher) - Browses the internet for initial research based on the given research task. +- Editor - Plans the report outline and structure based on the initial research. +- For each outline topic (in parallel): + - Researcher (gpt-researcher) - Runs an in depth research on the subtopics and writes a draft. + - Reviewer - Validates the correctness of the draft given a set of criteria and provides feedback. + - Revisor - Revises the draft until it is satisfactory based on the reviewer feedback. +- Writer - Compiles and writes the final report including an introduction, conclusion and references section from the given research findings. +- Publisher - Publishes the final report to multi formats such as PDF, Docx, Markdown, etc. + +## How to run +1. Install required packages: + ```bash + pip install -r requirements.txt + ``` +3. Update env variables + ```bash + export OPENAI_API_KEY={Your OpenAI API Key here} + export TAVILY_API_KEY={Your Tavily API Key here} + ``` +2. Run the application: + ```bash + python main.py + ``` + +## Usage +To change the research query and customize the report, edit the `task.json` file in the main directory. +#### Task.json contains the following fields: +- `query` - The research query or task. +- `model` - The OpenAI LLM to use for the agents. +- `max_sections` - The maximum number of sections in the report. Each section is a subtopic of the research query. +- `include_human_feedback` - If true, the user can provide feedback to the agents. If false, the agents will work autonomously. +- `publish_formats` - The formats to publish the report in. The reports will be written in the `output` directory. +- `source` - The location from which to conduct the research. Options: `web` or `local`. For local, please add `DOC_PATH` env var. +- `follow_guidelines` - If true, the research report will follow the guidelines below. It will take longer to complete. If false, the report will be generated faster but may not follow the guidelines. +- `guidelines` - A list of guidelines that the report must follow. +- `verbose` - If true, the application will print detailed logs to the console. + +#### For example: +```json +{ + "query": "Is AI in a hype cycle?", + "model": "gpt-4o", + "max_sections": 3, + "publish_formats": { + "markdown": true, + "pdf": true, + "docx": true + }, + "include_human_feedback": false, + "source": "web", + "follow_guidelines": true, + "guidelines": [ + "The report MUST fully answer the original question", + "The report MUST be written in apa format", + "The report MUST be written in english" + ], + "verbose": true +} +``` + +## To Deploy + +```shell +pip install langgraph-cli +langgraph up +``` + +From there, see documentation [here](https://github.com/langchain-ai/langgraph-example) on how to use the streaming and async endpoints, as well as the playground. + +## NextJS Frontend App + +The React app (located in `frontend` directory) is our Frontend 2.0 which we hope will enable us to display the robustness of the backend on the frontend, as well. + +It comes with loads of added features, such as: + - a drag-n-drop user interface for uploading and deleting files to be used as local documents by GPTResearcher. + - a GUI for setting your GPTR environment variables. + - the ability to trigger the multi_agents flow via the Backend Module or Langgraph Cloud Host (currently in closed beta). + - stability fixes + - and more coming soon! 
+ +### Run the NextJS React App with Docker + +> **Step 1** - [Install Docker](https://docs.gptr.dev/docs/gpt-researcher/getting-started/getting-started-with-docker) + +> **Step 2** - Clone the '.env.example' file, add your API Keys to the cloned file and save the file as '.env' + +> **Step 3** - Within the docker-compose file comment out services that you don't want to run with Docker. + +```bash +$ docker-compose up --build +``` + +> **Step 4** - By default, if you haven't uncommented anything in your docker-compose file, this flow will start 2 processes: + - the Python server running on localhost:8000 + - the React app running on localhost:3000 + +Visit localhost:3000 on any browser and enjoy researching! + + +### Run the NextJS React App with NPM + +```bash +cd frontend +nvm install 18.17.0 +nvm use v18.17.0 +npm install --legacy-peer-deps +npm run dev +``` \ No newline at end of file diff --git a/docs/docs/gpt-researcher/search-engines/retrievers.md b/docs/docs/gpt-researcher/search-engines/retrievers.md new file mode 100644 index 0000000000000000000000000000000000000000..2fe75ab09a25f2cedeea4cd33dd5dedb33342b48 --- /dev/null +++ b/docs/docs/gpt-researcher/search-engines/retrievers.md @@ -0,0 +1,75 @@ +# Retrievers + +Retrievers are search engines used to find the most relevant documents for a given research task. +You can specify your preferred web search or use any custom retriever of your choice. + +## Web Search Engines + +GPT Researcher defaults to using the [Tavily](https://app.tavily.com) search engine for retrieving search results. +But you can also use other search engines by specifying the `RETRIEVER` env var. Please note that each search engine has its own API Key requirements and usage limits. + +For example: + +```bash +RETRIEVER=bing +``` + +You can also specify multiple retrievers by separating them with commas. The system will use each specified retriever in sequence. +For example: + +```bash +RETRIEVER=tavily, arxiv +``` + +Thanks to our community, we have integrated the following web search engines: + +- [Tavily](https://app.tavily.com) - Default +- [Bing](https://www.microsoft.com/en-us/bing/apis/bing-web-search-api) - Env: `RETRIEVER=bing` +- [Google](https://developers.google.com/custom-search/v1/overview) - Env: `RETRIEVER=google` +- [SearchApi](https://www.searchapi.io/) - Env: `RETRIEVER=searchapi` +- [Serp API](https://serpapi.com/) - Env: `RETRIEVER=serpapi` +- [Serper](https://serper.dev/) - Env: `RETRIEVER=serper` +- [Searx](https://searx.github.io/searx/) - Env: `RETRIEVER=searx` +- [Duckduckgo](https://pypi.org/project/duckduckgo-search/) - Env: `RETRIEVER=duckduckgo` +- [Arxiv](https://info.arxiv.org/help/api/index.html) - Env: `RETRIEVER=arxiv` +- [Exa](https://docs.exa.ai/reference/getting-started) - Env: `RETRIEVER=exa` +- [PubMedCentral](https://www.ncbi.nlm.nih.gov/home/develop/api/) - Env: `RETRIEVER=pubmed_central` + +## Custom Retrievers + +You can also use any custom retriever of your choice by specifying the `RETRIEVER=custom` env var. +Custom retrievers allow you to use any search engine that provides an API to retrieve documents and is widely used for enterprise research tasks. + +In addition to setting the `RETRIEVER` env, you also need to set the following env vars: + +- `RETRIEVER_ENDPOINT`: The endpoint URL of the custom retriever. +- Additional arguments required by the retriever should be prefixed with `RETRIEVER_ARG_` (e.g., RETRIEVER_ARG_API_KEY). 
+ +### Example + +```bash +RETRIEVER=custom +RETRIEVER_ENDPOINT=https://api.myretriever.com +RETRIEVER_ARG_API_KEY=YOUR_API_KEY +``` + +### Response Format + +For the custom retriever to work correctly, the response from the endpoint should be in the following format: + +```json +[ + { + "url": "http://example.com/page1", + "raw_content": "Content of page 1" + }, + { + "url": "http://example.com/page2", + "raw_content": "Content of page 2" + } +] +``` + +The system assumes this response format and processes the list of sources accordingly. + +Missing a retriever? Feel free to contribute to this project by submitting issues or pull requests on our [GitHub](https://github.com/assafelovic/gpt-researcher) page. diff --git a/docs/docs/gpt-researcher/search-engines/test-your-retriever.md b/docs/docs/gpt-researcher/search-engines/test-your-retriever.md new file mode 100644 index 0000000000000000000000000000000000000000..e9aff2b50b0eeaffb6c1400ae2b2ef14be77074e --- /dev/null +++ b/docs/docs/gpt-researcher/search-engines/test-your-retriever.md @@ -0,0 +1,68 @@ +# Testing your Retriever + +To test your retriever, you can use the following code snippet. The script will search for a sub-query and display the search results. + +```python +import asyncio +from dotenv import load_dotenv +from gpt_researcher.config.config import Config +from gpt_researcher.actions.retriever import get_retrievers +from gpt_researcher.skills.researcher import ResearchConductor +import pprint +# Load environment variables from .env file +load_dotenv() + +async def test_scrape_data_by_query(): + # Initialize the Config object + config = Config() + + # Retrieve the retrievers based on the current configuration + retrievers = get_retrievers({}, config) + print("Retrievers:", retrievers) + + # Create a mock researcher object with necessary attributes + class MockResearcher: + def init(self): + self.retrievers = retrievers + self.cfg = config + self.verbose = True + self.websocket = None + self.scraper_manager = None # Mock or implement scraper manager + self.vector_store = None # Mock or implement vector store + + researcher = MockResearcher() + research_conductor = ResearchConductor(researcher) + # print('research_conductor',dir(research_conductor)) + # print('MockResearcher',dir(researcher)) + # Define a sub-query to test + sub_query = "design patterns for autonomous ai agents" + + # Iterate through all retrievers + for retriever_class in retrievers: + # Instantiate the retriever with the sub-query + retriever = retriever_class(sub_query) + + # Perform the search using the current retriever + search_results = await asyncio.to_thread( + retriever.search, max_results=10 + ) + + print("\033[35mSearch results:\033[0m") + pprint.pprint(search_results, indent=4, width=80) + +if __name__ == "__main__": + asyncio.run(test_scrape_data_by_query()) +``` + +The output of the search results will include the title, body, and href of each search result. For example: + +```json +[{ + "body": "Jun 5, 2024 ... Three AI Design Patterns of Autonomous " + "Agents. Overview of the Three Patterns. Three notable AI " + "design patterns for autonomous agents include:.", + "href": "https://accredianpublication.medium.com/building-smarter-systems-the-role-of-agentic-design-patterns-in-genai-13617492f5df", + "title": "Building Smarter Systems: The Role of Agentic Design " + "Patterns in ..."}, + ...] 
+``` \ No newline at end of file diff --git a/docs/docs/reference/config/config.md b/docs/docs/reference/config/config.md new file mode 100644 index 0000000000000000000000000000000000000000..69125fdf46f3f9239160f0308eb751543608d9cf --- /dev/null +++ b/docs/docs/reference/config/config.md @@ -0,0 +1,127 @@ +--- +sidebar_label: config +title: config.config +--- + +Configuration class to store the state of bools for different scripts access. + +## Config Objects + +```python +class Config(metaclass=Singleton) +``` + +Configuration class to store the state of bools for different scripts access. + +#### \_\_init\_\_ + +```python +def __init__() -> None +``` + +Initialize the Config class + +#### set\_fast\_llm\_model + +```python +def set_fast_llm_model(value: str) -> None +``` + +Set the fast LLM model value. + +#### set\_smart\_llm\_model + +```python +def set_smart_llm_model(value: str) -> None +``` + +Set the smart LLM model value. + +#### set\_fast\_token\_limit + +```python +def set_fast_token_limit(value: int) -> None +``` + +Set the fast token limit value. + +#### set\_smart\_token\_limit + +```python +def set_smart_token_limit(value: int) -> None +``` + +Set the smart token limit value. + +#### set\_browse\_chunk\_max\_length + +```python +def set_browse_chunk_max_length(value: int) -> None +``` + +Set the browse_website command chunk max length value. + +#### set\_openai\_api\_key + +```python +def set_openai_api_key(value: str) -> None +``` + +Set the OpenAI API key value. + +#### set\_debug\_mode + +```python +def set_debug_mode(value: bool) -> None +``` + +Set the debug mode value. + +## APIKeyError Objects + +```python +class APIKeyError(Exception) +``` + +Exception raised when an API key is not set in config.py or as an environment variable. + +#### check\_openai\_api\_key + +```python +def check_openai_api_key(cfg) -> None +``` + +Check if the OpenAI API key is set in config.py or as an environment variable. + +#### check\_tavily\_api\_key + +```python +def check_tavily_api_key(cfg) -> None +``` + +Check if the Tavily Search API key is set in config.py or as an environment variable. + +#### check\_google\_api\_key + +```python +def check_google_api_key(cfg) -> None +``` + +Check if the Google API key is set in config.py or as an environment variable. + +#### check\_serp\_api\_key + +```python +def check_serp_api_key(cfg) -> None +``` + +Check if the SERP API key is set in config.py or as an environment variable. + +#### check\_searx\_url + +```python +def check_searx_url(cfg) -> None +``` + +Check if the Searx URL is set in config.py or as an environment variable. + diff --git a/docs/docs/reference/config/singleton.md b/docs/docs/reference/config/singleton.md new file mode 100644 index 0000000000000000000000000000000000000000..0b72bff7e8ff9eda45218db9d4eb5f3bb588ae8e --- /dev/null +++ b/docs/docs/reference/config/singleton.md @@ -0,0 +1,31 @@ +--- +sidebar_label: singleton +title: config.singleton +--- + +The singleton metaclass for ensuring only one instance of a class. + +## Singleton Objects + +```python +class Singleton(abc.ABCMeta, type) +``` + +Singleton metaclass for ensuring only one instance of a class. + +#### \_\_call\_\_ + +```python +def __call__(cls, *args, **kwargs) +``` + +Call method for the singleton metaclass. + +## AbstractSingleton Objects + +```python +class AbstractSingleton(abc.ABC, metaclass=Singleton) +``` + +Abstract singleton class for ensuring only one instance of a class. 
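
The reference above describes the pattern but not how it behaves in practice. The sketch below is a generic illustration of a metaclass-based singleton rather than the project's actual source; the `AppConfig` class is a hypothetical example.

```python
import abc


class Singleton(abc.ABCMeta, type):
    """Metaclass that hands back the same instance for every instantiation of a class."""

    _instances = {}

    def __call__(cls, *args, **kwargs):
        # Build the instance on first use, then reuse it for all later calls.
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]


class AppConfig(metaclass=Singleton):  # hypothetical example class
    def __init__(self) -> None:
        self.debug_mode = False


assert AppConfig() is AppConfig()  # both calls return the exact same object
```

Because `__call__` caches the first instance, every module that instantiates the class sees the same shared state, which is the behavior the `Config(metaclass=Singleton)` reference above relies on.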
+ diff --git a/docs/docs/reference/processing/html.md b/docs/docs/reference/processing/html.md new file mode 100644 index 0000000000000000000000000000000000000000..44808d4ede27ce9f067a0bfe7a1a759290040015 --- /dev/null +++ b/docs/docs/reference/processing/html.md @@ -0,0 +1,43 @@ +--- +sidebar_label: html +title: processing.html +--- + +HTML processing functions + +#### extract\_hyperlinks + +```python +def extract_hyperlinks(soup: BeautifulSoup, + base_url: str) -> list[tuple[str, str]] +``` + +Extract hyperlinks from a BeautifulSoup object + +**Arguments**: + +- `soup` _BeautifulSoup_ - The BeautifulSoup object +- `base_url` _str_ - The base URL + + +**Returns**: + + List[Tuple[str, str]]: The extracted hyperlinks + +#### format\_hyperlinks + +```python +def format_hyperlinks(hyperlinks: list[tuple[str, str]]) -> list[str] +``` + +Format hyperlinks to be displayed to the user + +**Arguments**: + +- `hyperlinks` _List[Tuple[str, str]]_ - The hyperlinks to format + + +**Returns**: + +- `List[str]` - The formatted hyperlinks + diff --git a/docs/docs/reference/processing/text.md b/docs/docs/reference/processing/text.md new file mode 100644 index 0000000000000000000000000000000000000000..954718720eea662a2a1e5f1bab56ccb9cba4c97c --- /dev/null +++ b/docs/docs/reference/processing/text.md @@ -0,0 +1,103 @@ +--- +sidebar_label: text +title: processing.text +--- + +Text processing functions + +#### split\_text + +```python +def split_text(text: str, + max_length: int = 8192) -> Generator[str, None, None] +``` + +Split text into chunks of a maximum length + +**Arguments**: + +- `text` _str_ - The text to split +- `max_length` _int, optional_ - The maximum length of each chunk. Defaults to 8192. + + +**Yields**: + +- `str` - The next chunk of text + + +**Raises**: + +- `ValueError` - If the text is longer than the maximum length + +#### summarize\_text + +```python +def summarize_text(url: str, + text: str, + question: str, + driver: Optional[WebDriver] = None) -> str +``` + +Summarize text using the OpenAI API + +**Arguments**: + +- `url` _str_ - The url of the text +- `text` _str_ - The text to summarize +- `question` _str_ - The question to ask the model +- `driver` _WebDriver_ - The webdriver to use to scroll the page + + +**Returns**: + +- `str` - The summary of the text + +#### scroll\_to\_percentage + +```python +def scroll_to_percentage(driver: WebDriver, ratio: float) -> None +``` + +Scroll to a percentage of the page + +**Arguments**: + +- `driver` _WebDriver_ - The webdriver to use +- `ratio` _float_ - The percentage to scroll to + + +**Raises**: + +- `ValueError` - If the ratio is not between 0 and 1 + +#### create\_message + +```python +def create_message(chunk: str, question: str) -> Dict[str, str] +``` + +Create a message for the chat completion + +**Arguments**: + +- `chunk` _str_ - The chunk of text to summarize +- `question` _str_ - The question to answer + + +**Returns**: + + Dict[str, str]: The message to send to the chat completion + +#### write\_to\_file + +```python +def write_to_file(filename: str, text: str) -> None +``` + +Write text to a file + +**Arguments**: + +- `text` _str_ - The text to write +- `filename` _str_ - The filename to write to + diff --git a/docs/docs/reference/sidebar.json b/docs/docs/reference/sidebar.json new file mode 100644 index 0000000000000000000000000000000000000000..c9819d64dedbc73287c748cf517670cc0d70c59e --- /dev/null +++ b/docs/docs/reference/sidebar.json @@ -0,0 +1,5 @@ +{ + "items": [], + "label": "Reference", + "type": "category" +} \ No 
newline at end of file diff --git a/docs/docs/roadmap.md b/docs/docs/roadmap.md new file mode 100644 index 0000000000000000000000000000000000000000..649105f604821a0b44ceac95c135f0f663c76941 --- /dev/null +++ b/docs/docs/roadmap.md @@ -0,0 +1,9 @@ +# Roadmap + +We're constantly working on additional features and improvements to our products and services. We're also working on new products and services to help you build better AI applications using [GPT Researcher](https://gptr.dev). + +Our vision is to build the #1 autonomous research agent for AI developers and researchers, and we're excited to have you join us on this journey! + +The roadmap is prioritized based on the following goals: Performance, Quality, Modularity and Conversational flexibility. The roadmap is public and can be found [here](https://trello.com/b/3O7KBePw/gpt-researcher-roadmap). + +Interested in collaborating or contributing? Check out our [contributing page](/docs/contribute) for more information. \ No newline at end of file diff --git a/docs/docs/welcome.md b/docs/docs/welcome.md new file mode 100644 index 0000000000000000000000000000000000000000..3037e55c66b95b677e04fd7bbeb102b0a0cb42b7 --- /dev/null +++ b/docs/docs/welcome.md @@ -0,0 +1,13 @@ +# Welcome + +Hey there! 👋 + +We're a team of AI researchers and developers who are passionate about building the next generation of AI assistants. +Our mission is to empower individuals and organizations with accurate, unbiased, and factual information. + +### GPT Researcher +Quickly accessing relevant and trustworthy information is more crucial than ever. However, we've learned that none of today's search engines provide a suitable tool that provides factual, explicit and objective answers without the need to continuously click and explore multiple sites for a given research task. + +This is why we've built the trending open source **[GPT Researcher](https://github.com/assafelovic/gpt-researcher)**. GPT Researcher is an autonomous agent that takes care of the tedious task of research for you, by scraping, filtering and aggregating over 20+ web sources per a single research task. + +To learn more about GPT Researcher, check out the [documentation page](/docs/gpt-researcher/getting-started/introduction). 
diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js new file mode 100644 index 0000000000000000000000000000000000000000..af7015cf93db401dd5a7a3d73754fc1e25f42694 --- /dev/null +++ b/docs/docusaurus.config.js @@ -0,0 +1,131 @@ +/** @type {import('@docusaurus/types').DocusaurusConfig} */ +const math = require('remark-math'); +const katex = require('rehype-katex'); + +module.exports = { + title: 'GPT Researcher', + tagline: 'The leading autonomous AI research agent', + url: 'https://docs.gptr.dev', + baseUrl: '/', + onBrokenLinks: 'ignore', + //deploymentBranch: 'master', + onBrokenMarkdownLinks: 'warn', + favicon: 'img/gptr-logo.png', + organizationName: 'assafelovic', + trailingSlash: false, + projectName: 'gpt-researcher', + themeConfig: { + navbar: { + title: 'GPT Researcher', + logo: { + alt: 'GPT Researcher', + src: 'img/gptr-logo.png', + }, + items: [ + { + type: 'doc', + docId: 'welcome', + position: 'left', + label: 'Docs', + }, + + {to: 'blog', label: 'Blog', position: 'left'}, + { + type: 'doc', + docId: 'faq', + position: 'left', + label: 'FAQ', + }, + { + href: 'mailto:assaf.elovic@gmail.com', + position: 'left', + label: 'Contact', + }, + { + href: 'https://github.com/assafelovic/gpt-researcher', + label: 'GitHub', + position: 'right', + }, + ], + }, + footer: { + style: 'dark', + links: [ + { + title: 'Community', + items: [ + { + label: 'Discord', + href: 'https://discord.gg/8YkBcCED5y', + }, + { + label: 'Twitter', + href: 'https://twitter.com/assaf_elovic', + }, + { + label: 'LinkedIn', + href: 'https://www.linkedin.com/in/assafe/', + }, + ], + }, + { + title: 'Company', + items: [ + { + label: 'Homepage', + href: 'https://gptr.dev', + }, + { + label: 'Contact', + href: 'mailto:assafelovic@gmail.com', + }, + ], + }, + ], + copyright: `Copyright © ${new Date().getFullYear()} GPT Researcher.`, + }, + }, + presets: [ + [ + '@docusaurus/preset-classic', + { + docs: { + sidebarPath: require.resolve('./sidebars.js'), + // Please change this to your repo. + editUrl: + 'https://github.com/assafelovic/gpt-researcher/tree/master/docs', + remarkPlugins: [math], + rehypePlugins: [katex], + }, + theme: { + customCss: require.resolve('./src/css/custom.css'), + }, + }, + ], + ], + stylesheets: [ + { + href: "https://cdn.jsdelivr.net/npm/katex@0.13.11/dist/katex.min.css", + integrity: "sha384-Um5gpz1odJg5Z4HAmzPtgZKdTBHZdw8S29IecapCSB31ligYPhHQZMIlWLYQGVoc", + crossorigin: "anonymous", + }, + ], + + plugins: [ + // ... Your other plugins. + [ + require.resolve("@easyops-cn/docusaurus-search-local"), + { + // ... Your options. + // `hashed` is recommended as long-term-cache of index file is possible. + hashed: true, + blogDir:"./blog/" + // For Docs using Chinese, The `language` is recommended to set to: + // ``` + // language: ["en", "zh"], + // ``` + // When applying `zh` in language, please install `nodejieba` in your project. 
+ }, + ], + ], +}; diff --git a/docs/package.json b/docs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..45614930cb289dda5e3c21eec4282fbd0da663b5 --- /dev/null +++ b/docs/package.json @@ -0,0 +1,56 @@ +{ + "name": "website", + "version": "0.0.0", + "private": true, + "resolutions" :{ + "nth-check":"2.0.1", + "trim":"0.0.3", + "got": "11.8.5", + "node-forge": "1.3.0", + "minimatch": "3.0.5", + "loader-utils": "2.0.4", + "eta": "2.0.0", + "@sideway/formula": "3.0.1", + "http-cache-semantics": "4.1.1" + }, + "scripts": { + "docusaurus": "docusaurus", + "start": "docusaurus start", + "build": "docusaurus build", + "swizzle": "docusaurus swizzle", + "deploy": "docusaurus deploy", + "clear": "docusaurus clear", + "serve": "docusaurus serve", + "write-translations": "docusaurus write-translations", + "write-heading-ids": "docusaurus write-heading-ids" + }, + "dependencies": { + "@docusaurus/core": "0.0.0-4193", + "@docusaurus/preset-classic": "0.0.0-4193", + "@easyops-cn/docusaurus-search-local": "^0.21.1", + "@mdx-js/react": "^1.6.21", + "@svgr/webpack": "^5.5.0", + "clsx": "^1.1.1", + "file-loader": "^6.2.0", + "hast-util-is-element": "1.1.0", + "react": "^17.0.1", + "react-dom": "^17.0.1", + "rehype-katex": "4", + "remark-math": "3", + "trim": "^0.0.3", + "url-loader": "^4.1.1", + "minimatch": "3.0.5" + }, + "browserslist": { + "production": [ + ">0.5%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } +} diff --git a/docs/pydoc-markdown.yml b/docs/pydoc-markdown.yml new file mode 100644 index 0000000000000000000000000000000000000000..4c23389ed3a59493b29853b74a66f8eeb38643f6 --- /dev/null +++ b/docs/pydoc-markdown.yml @@ -0,0 +1,16 @@ +loaders: + - type: python + search_path: [../docs] +processors: + - type: filter + skip_empty_modules: true + - type: smart + - type: crossref +renderer: + type: docusaurus + docs_base_path: docs + relative_output_path: reference + relative_sidebar_path: sidebar.json + sidebar_top_level_label: Reference + markdown: + escape_html_in_docstring: false diff --git a/docs/sidebars.js b/docs/sidebars.js new file mode 100644 index 0000000000000000000000000000000000000000..f689e644dd59e1f20738ce16566c8983b23a0596 --- /dev/null +++ b/docs/sidebars.js @@ -0,0 +1,105 @@ +/** + * Creating a sidebar enables you to: + - create an ordered group of docs + - render a sidebar for each doc of that group + - provide next/previous navigation + + The sidebars can be generated from the filesystem, or explicitly defined here. + + Create as many sidebars as you want. 
+ */ + + module.exports = { + docsSidebar: [ + 'welcome', + { + type: 'category', + label: 'Getting Started', + collapsible: true, + collapsed: false, + items: [ + 'gpt-researcher/getting-started/introduction', + 'gpt-researcher/getting-started/how-to-choose', + 'gpt-researcher/getting-started/getting-started', + 'gpt-researcher/getting-started/cli', + 'gpt-researcher/getting-started/getting-started-with-docker', + 'gpt-researcher/getting-started/linux-deployment', + ] + }, + { + type: 'category', + label: 'GPT Researcher', + collapsible: true, + collapsed: true, + items: [ + 'gpt-researcher/gptr/pip-package', + 'gpt-researcher/gptr/example', + 'gpt-researcher/gptr/config', + 'gpt-researcher/gptr/scraping', + 'gpt-researcher/gptr/handling-logs-as-they-stream', + 'gpt-researcher/gptr/querying-the-backend', + 'gpt-researcher/gptr/automated-tests', + 'gpt-researcher/gptr/troubleshooting', + ], + }, + { + type: 'category', + label: 'Frontend', + collapsible: true, + collapsed: true, + items: [ + 'gpt-researcher/frontend/frontend', + 'gpt-researcher/frontend/playing-with-webhooks', + 'gpt-researcher/frontend/logs', + ], + }, + { + type: 'category', + label: 'Custom Context', + collapsible: true, + collapsed: true, + items: [ + 'gpt-researcher/context/tailored-research', + 'gpt-researcher/context/local-docs', + 'gpt-researcher/context/filtering-by-domain', + 'gpt-researcher/context/vector-stores', + ] + }, + { + type: 'category', + label: 'LLM Providers', + collapsible: true, + collapsed: true, + items: [ + 'gpt-researcher/llms/llms', + 'gpt-researcher/llms/running-with-ollama', + 'gpt-researcher/llms/testing-your-llm' + ] + }, + { + type: 'category', + label: 'Search Engines', + collapsible: true, + collapsed: true, + items: [ + 'gpt-researcher/search-engines/retrievers', + 'gpt-researcher/search-engines/test-your-retriever' + ] + }, + { + type: 'category', + label: 'Multi-Agent Frameworks', + collapsible: true, + collapsed: true, + items: [ + 'gpt-researcher/multi_agents/langgraph', + ] + }, + {'Examples': [{type: 'autogenerated', dirName: 'examples'}]}, + 'contribute', + 'roadmap', + 'faq', + ], + // pydoc-markdown auto-generated markdowns from docstrings + referenceSideBar: [require("./docs/reference/sidebar.json")] +}; diff --git a/docs/src/components/HomepageFeatures.js b/docs/src/components/HomepageFeatures.js new file mode 100644 index 0000000000000000000000000000000000000000..6b1ed975ed8f87d211bad4fec701fc2c07e9fa05 --- /dev/null +++ b/docs/src/components/HomepageFeatures.js @@ -0,0 +1,78 @@ +import React from 'react'; +import clsx from 'clsx'; +import { Link } from 'react-router-dom'; +import styles from './HomepageFeatures.module.css'; + +const FeatureList = [ + { + title: 'GPT Researcher', + Svg: require('../../static/img/gptr-logo.png').default, + docLink: './docs/gpt-researcher/getting-started/getting-started', + description: ( + <> + GPT Researcher is an open source autonomous agent designed for comprehensive online research on a variety of tasks. 
+ + ), + }, + /*{ + title: 'Tavily Search API', + Svg: require('../../static/img/tavily.png').default, + docLink: './docs/tavily-api/introduction', + description: ( + <> + Tavily Search API is a search engine optimized for LLMs, optimized for a factual, efficient, and persistent search experience + + ), + },*/ + { + title: 'Multi-Agent Assistant', + Svg: require('../../static/img/multi-agent.png').default, + docLink: './docs/gpt-researcher/multi_agents/langgraph', + description: ( + <> + Learn how a team of AI agents can work together to conduct research on a given topic, from planning to publication. + + ), + }, + { + title: 'Examples and Demos', + Svg: require('../../static/img/examples.png').default, + docLink: './docs/examples/examples', + description: ( + <> + Check out GPT Researcher in action across multiple frameworks and use cases such as hybrid research and long detailed reports. + + ), + }, +]; + +function Feature({Svg, title, description, docLink}) { + return ( +
+
+ {/**/} + {title} +
+
+ +

{title}

+ +

{description}

+
+
+ ); +} + +export default function HomepageFeatures() { + return ( +
+
+
+ {FeatureList.map((props, idx) => ( + + ))} +
+
+
+ ); +} diff --git a/docs/src/components/HomepageFeatures.module.css b/docs/src/components/HomepageFeatures.module.css new file mode 100644 index 0000000000000000000000000000000000000000..6026dd666becd9cc19861785a11f6185aa17e33e --- /dev/null +++ b/docs/src/components/HomepageFeatures.module.css @@ -0,0 +1,13 @@ +/* stylelint-disable docusaurus/copyright-header */ + +.features { + display: flex; + align-items: center; + padding: 2rem 0; + width: 100%; +} + +.featureSvg { + height: 120px; + width: 200px; +} diff --git a/docs/src/css/custom.css b/docs/src/css/custom.css new file mode 100644 index 0000000000000000000000000000000000000000..cf88f3b7152c3e7577923921b5fc938652c1fc00 --- /dev/null +++ b/docs/src/css/custom.css @@ -0,0 +1,100 @@ +:root { + --ifm-font-size-base: 16px; + --ifm-code-font-size: 90%; + + --ifm-color-primary: #0c4da2; + --ifm-color-primary-dark: rgb(11, 69, 146); + --ifm-color-primary-darker: #0a418a; + --ifm-color-primary-darkest: #083671; + --ifm-color-primary-light: #0d55b2; + --ifm-color-primary-lighter: #0e59ba; + --ifm-color-primary-lightest: #1064d3; + + --ifm-color-emphasis-300: #1064d3; + --ifm-link-color: #1064d3; + --ifm-menu-color-active: #1064d3; +} + +.docusaurus-highlight-code-line { +background-color: rgba(0, 0, 0, 0.1); +display: block; +margin: 0 calc(-1 * var(--ifm-pre-padding)); +padding: 0 var(--ifm-pre-padding); +} +html[data-theme='dark'] .docusaurus-highlight-code-line { +background-color: rgb(0, 0, 0, 0.3); +} + +.admonition-content a { +text-decoration: underline; +font-weight: 600; +color: inherit; +} + +a { +font-weight: 600; +} + +.markdown > p { + font-size: 16px; +} + +.navbar { + font-size: 16px; +} + +li { +font-size: 16px; +} + +blockquote { + /* samsung blue with lots of transparency */ + background-color: #0c4da224; +} +@media (prefers-color-scheme: dark) { +:root { + --ifm-hero-text-color: white; +} +} +@media (prefers-color-scheme: dark) { +.hero.hero--primary { --ifm-hero-text-color: white;} +} + +@media (prefers-color-scheme: dark) { +blockquote { + --ifm-color-emphasis-300: var(--ifm-color-primary); + /* border-left: 6px solid var(--ifm-color-emphasis-300); */ +} +} +@media (prefers-color-scheme: dark) { +code { + /* background-color: rgb(41, 45, 62); */ +} +} + + +/* Docusaurus still defaults to their green! 
*/ +@media (prefers-color-scheme: dark) { +.react-toggle-thumb { + border-color: var(--ifm-color-primary) !important; +} +} + + +.header-github-link:hover { +opacity: 0.6; +} + +.header-github-link:before { +content: ''; +width: 24px; +height: 24px; +display: flex; +background: url("data:image/svg+xml,%3Csvg viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M12 .297c-6.63 0-12 5.373-12 12 0 5.303 3.438 9.8 8.205 11.385.6.113.82-.258.82-.577 0-.285-.01-1.04-.015-2.04-3.338.724-4.042-1.61-4.042-1.61C4.422 18.07 3.633 17.7 3.633 17.7c-1.087-.744.084-.729.084-.729 1.205.084 1.838 1.236 1.838 1.236 1.07 1.835 2.809 1.305 3.495.998.108-.776.417-1.305.76-1.605-2.665-.3-5.466-1.332-5.466-5.93 0-1.31.465-2.38 1.235-3.22-.135-.303-.54-1.523.105-3.176 0 0 1.005-.322 3.3 1.23.96-.267 1.98-.399 3-.405 1.02.006 2.04.138 3 .405 2.28-1.552 3.285-1.23 3.285-1.23.645 1.653.24 2.873.12 3.176.765.84 1.23 1.91 1.23 3.22 0 4.61-2.805 5.625-5.475 5.92.42.36.81 1.096.81 2.22 0 1.606-.015 2.896-.015 3.286 0 .315.21.69.825.57C20.565 22.092 24 17.592 24 12.297c0-6.627-5.373-12-12-12'/%3E%3C/svg%3E") + no-repeat; +} + +html[data-theme='dark'] .header-github-link:before { +background: url("data:image/svg+xml,%3Csvg viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath fill='white' d='M12 .297c-6.63 0-12 5.373-12 12 0 5.303 3.438 9.8 8.205 11.385.6.113.82-.258.82-.577 0-.285-.01-1.04-.015-2.04-3.338.724-4.042-1.61-4.042-1.61C4.422 18.07 3.633 17.7 3.633 17.7c-1.087-.744.084-.729.084-.729 1.205.084 1.838 1.236 1.838 1.236 1.07 1.835 2.809 1.305 3.495.998.108-.776.417-1.305.76-1.605-2.665-.3-5.466-1.332-5.466-5.93 0-1.31.465-2.38 1.235-3.22-.135-.303-.54-1.523.105-3.176 0 0 1.005-.322 3.3 1.23.96-.267 1.98-.399 3-.405 1.02.006 2.04.138 3 .405 2.28-1.552 3.285-1.23 3.285-1.23.645 1.653.24 2.873.12 3.176.765.84 1.23 1.91 1.23 3.22 0 4.61-2.805 5.625-5.475 5.92.42.36.81 1.096.81 2.22 0 1.606-.015 2.896-.015 3.286 0 .315.21.69.825.57C20.565 22.092 24 17.592 24 12.297c0-6.627-5.373-12-12-12'/%3E%3C/svg%3E") + no-repeat; +} diff --git a/docs/src/pages/index.js b/docs/src/pages/index.js new file mode 100644 index 0000000000000000000000000000000000000000..79d397bf8d97ccc8b29bfac5ff316e9b524361b7 --- /dev/null +++ b/docs/src/pages/index.js @@ -0,0 +1,40 @@ +import React from 'react'; +import clsx from 'clsx'; +import Layout from '@theme/Layout'; +import Link from '@docusaurus/Link'; +import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; +import styles from './index.module.css'; +import HomepageFeatures from '../components/HomepageFeatures'; + +function HomepageHeader() { + const {siteConfig} = useDocusaurusContext(); + return ( +
+
+

{siteConfig.title}

+

{siteConfig.tagline}

+
+ + Getting Started - 5 min ⏱️ + +
+
+
+ ); +} + +export default function Home() { + const {siteConfig} = useDocusaurusContext(); + return ( + + +
+ +
+
+ ); +} diff --git a/docs/src/pages/index.module.css b/docs/src/pages/index.module.css new file mode 100644 index 0000000000000000000000000000000000000000..5e2483060b8798f1ce0ed95bd0d12cdbf6dddd6f --- /dev/null +++ b/docs/src/pages/index.module.css @@ -0,0 +1,25 @@ +/* stylelint-disable docusaurus/copyright-header */ + +/** + * CSS files with the .module.css suffix will be treated as CSS modules + * and scoped locally. + */ + +.heroBanner { + padding: 5rem 0; + text-align: center; + position: relative; + overflow: hidden; +} + +@media screen and (max-width: 966px) { + .heroBanner { + padding: 2rem; + } +} + +.buttons { + display: flex; + align-items: center; + justify-content: center; +} diff --git a/docs/static/.nojekyll b/docs/static/.nojekyll new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/docs/static/CNAME b/docs/static/CNAME new file mode 100644 index 0000000000000000000000000000000000000000..c5661873f63f93d752bab52acbd9296443f05926 --- /dev/null +++ b/docs/static/CNAME @@ -0,0 +1 @@ +docs.gptr.dev \ No newline at end of file diff --git a/docs/static/img/architecture.png b/docs/static/img/architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..0ad8847db3f74a4e9b792d8221ea7d4a9e6399fc --- /dev/null +++ b/docs/static/img/architecture.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93f7c083722105b00dc714d372a1075f4d5770b46fa19551dc2b772738f82d89 +size 143143 diff --git a/docs/static/img/banner1.jpg b/docs/static/img/banner1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..deb14833f10e2a37d981f87be324a7c86c871fc6 Binary files /dev/null and b/docs/static/img/banner1.jpg differ diff --git a/docs/static/img/examples.png b/docs/static/img/examples.png new file mode 100644 index 0000000000000000000000000000000000000000..e9062e46a08791e0a45e78f227a812cf15fb62c2 Binary files /dev/null and b/docs/static/img/examples.png differ diff --git a/docs/static/img/gptr-logo.png b/docs/static/img/gptr-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..76ec0b5e92c20a788103416236cbf37bc7829be9 Binary files /dev/null and b/docs/static/img/gptr-logo.png differ diff --git a/docs/static/img/leaderboard.png b/docs/static/img/leaderboard.png new file mode 100644 index 0000000000000000000000000000000000000000..473ad777d8fa654f41d27e7e7039a34ef3be4a27 --- /dev/null +++ b/docs/static/img/leaderboard.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:464154dc6743c09d800b8c7f234e511f173cd13072c58b05f5d639753f9dcf34 +size 235547 diff --git a/docs/static/img/multi-agent.png b/docs/static/img/multi-agent.png new file mode 100644 index 0000000000000000000000000000000000000000..8a4c7ea67f8305c9243520ca47714c7ac62348ba Binary files /dev/null and b/docs/static/img/multi-agent.png differ diff --git a/docs/yarn.lock b/docs/yarn.lock new file mode 100644 index 0000000000000000000000000000000000000000..a68a7223cf484d5b0bfeba6449c8d3990376bede --- /dev/null +++ b/docs/yarn.lock @@ -0,0 +1,8114 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@algolia/autocomplete-core@1.7.2": + version "1.7.2" + resolved "https://registry.npmmirror.com/@algolia/autocomplete-core/-/autocomplete-core-1.7.2.tgz" + integrity sha512-eclwUDC6qfApNnEfu1uWcL/rudQsn59tjEoUYZYE2JSXZrHLRjBUGMxiCoknobU2Pva8ejb0eRxpIYDtVVqdsw== + dependencies: + "@algolia/autocomplete-shared" "1.7.2" + +"@algolia/autocomplete-preset-algolia@1.7.2": + version "1.7.2" + resolved "https://registry.npmmirror.com/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.7.2.tgz" + integrity sha512-+RYEG6B0QiGGfRb2G3MtPfyrl0dALF3cQNTWBzBX6p5o01vCCGTTinAm2UKG3tfc2CnOMAtnPLkzNZyJUpnVJw== + dependencies: + "@algolia/autocomplete-shared" "1.7.2" + +"@algolia/autocomplete-shared@1.7.2": + version "1.7.2" + resolved "https://registry.npmmirror.com/@algolia/autocomplete-shared/-/autocomplete-shared-1.7.2.tgz" + integrity sha512-QCckjiC7xXHIUaIL3ektBtjJ0w7tTA3iqKcAE/Hjn1lZ5omp7i3Y4e09rAr9ZybqirL7AbxCLLq0Ra5DDPKeug== + +"@algolia/cache-browser-local-storage@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.14.2.tgz" + integrity sha512-FRweBkK/ywO+GKYfAWbrepewQsPTIEirhi1BdykX9mxvBPtGNKccYAxvGdDCumU1jL4r3cayio4psfzKMejBlA== + dependencies: + "@algolia/cache-common" "4.14.2" + +"@algolia/cache-common@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/cache-common/-/cache-common-4.14.2.tgz" + integrity sha512-SbvAlG9VqNanCErr44q6lEKD2qoK4XtFNx9Qn8FK26ePCI8I9yU7pYB+eM/cZdS9SzQCRJBbHUumVr4bsQ4uxg== + +"@algolia/cache-in-memory@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/cache-in-memory/-/cache-in-memory-4.14.2.tgz" + integrity sha512-HrOukWoop9XB/VFojPv1R5SVXowgI56T9pmezd/djh2JnVN/vXswhXV51RKy4nCpqxyHt/aGFSq2qkDvj6KiuQ== + dependencies: + "@algolia/cache-common" "4.14.2" + +"@algolia/client-account@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/client-account/-/client-account-4.14.2.tgz" + integrity sha512-WHtriQqGyibbb/Rx71YY43T0cXqyelEU0lB2QMBRXvD2X0iyeGl4qMxocgEIcbHyK7uqE7hKgjT8aBrHqhgc1w== + dependencies: + "@algolia/client-common" "4.14.2" + "@algolia/client-search" "4.14.2" + "@algolia/transporter" "4.14.2" + +"@algolia/client-analytics@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/client-analytics/-/client-analytics-4.14.2.tgz" + integrity sha512-yBvBv2mw+HX5a+aeR0dkvUbFZsiC4FKSnfqk9rrfX+QrlNOKEhCG0tJzjiOggRW4EcNqRmaTULIYvIzQVL2KYQ== + dependencies: + "@algolia/client-common" "4.14.2" + "@algolia/client-search" "4.14.2" + "@algolia/requester-common" "4.14.2" + "@algolia/transporter" "4.14.2" + +"@algolia/client-common@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/client-common/-/client-common-4.14.2.tgz" + integrity sha512-43o4fslNLcktgtDMVaT5XwlzsDPzlqvqesRi4MjQz2x4/Sxm7zYg5LRYFol1BIhG6EwxKvSUq8HcC/KxJu3J0Q== + dependencies: + "@algolia/requester-common" "4.14.2" + "@algolia/transporter" "4.14.2" + +"@algolia/client-personalization@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/client-personalization/-/client-personalization-4.14.2.tgz" + integrity sha512-ACCoLi0cL8CBZ1W/2juehSltrw2iqsQBnfiu/Rbl9W2yE6o2ZUb97+sqN/jBqYNQBS+o0ekTMKNkQjHHAcEXNw== + dependencies: + "@algolia/client-common" "4.14.2" + "@algolia/requester-common" "4.14.2" + "@algolia/transporter" "4.14.2" + +"@algolia/client-search@4.14.2": + version "4.14.2" + resolved 
"https://registry.npmmirror.com/@algolia/client-search/-/client-search-4.14.2.tgz" + integrity sha512-L5zScdOmcZ6NGiVbLKTvP02UbxZ0njd5Vq9nJAmPFtjffUSOGEp11BmD2oMJ5QvARgx2XbX4KzTTNS5ECYIMWw== + dependencies: + "@algolia/client-common" "4.14.2" + "@algolia/requester-common" "4.14.2" + "@algolia/transporter" "4.14.2" + +"@algolia/events@^4.0.1": + version "4.0.1" + resolved "https://registry.npmmirror.com/@algolia/events/-/events-4.0.1.tgz" + integrity sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ== + +"@algolia/logger-common@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/logger-common/-/logger-common-4.14.2.tgz" + integrity sha512-/JGlYvdV++IcMHBnVFsqEisTiOeEr6cUJtpjz8zc0A9c31JrtLm318Njc72p14Pnkw3A/5lHHh+QxpJ6WFTmsA== + +"@algolia/logger-console@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/logger-console/-/logger-console-4.14.2.tgz" + integrity sha512-8S2PlpdshbkwlLCSAB5f8c91xyc84VM9Ar9EdfE9UmX+NrKNYnWR1maXXVDQQoto07G1Ol/tYFnFVhUZq0xV/g== + dependencies: + "@algolia/logger-common" "4.14.2" + +"@algolia/requester-browser-xhr@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.14.2.tgz" + integrity sha512-CEh//xYz/WfxHFh7pcMjQNWgpl4wFB85lUMRyVwaDPibNzQRVcV33YS+63fShFWc2+42YEipFGH2iPzlpszmDw== + dependencies: + "@algolia/requester-common" "4.14.2" + +"@algolia/requester-common@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/requester-common/-/requester-common-4.14.2.tgz" + integrity sha512-73YQsBOKa5fvVV3My7iZHu1sUqmjjfs9TteFWwPwDmnad7T0VTCopttcsM3OjLxZFtBnX61Xxl2T2gmG2O4ehg== + +"@algolia/requester-node-http@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/requester-node-http/-/requester-node-http-4.14.2.tgz" + integrity sha512-oDbb02kd1o5GTEld4pETlPZLY0e+gOSWjWMJHWTgDXbv9rm/o2cF7japO6Vj1ENnrqWvLBmW1OzV9g6FUFhFXg== + dependencies: + "@algolia/requester-common" "4.14.2" + +"@algolia/transporter@4.14.2": + version "4.14.2" + resolved "https://registry.npmmirror.com/@algolia/transporter/-/transporter-4.14.2.tgz" + integrity sha512-t89dfQb2T9MFQHidjHcfhh6iGMNwvuKUvojAj+JsrHAGbuSy7yE4BylhLX6R0Q1xYRoC4Vvv+O5qIw/LdnQfsQ== + dependencies: + "@algolia/cache-common" "4.14.2" + "@algolia/logger-common" "4.14.2" + "@algolia/requester-common" "4.14.2" + +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "https://registry.npmmirror.com/@ampproject/remapping/-/remapping-2.2.0.tgz" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.22.13", "@babel/code-frame@^7.8.3": + version "7.22.13" + resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz" + integrity sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w== + dependencies: + "@babel/highlight" "^7.22.13" + chalk "^2.4.2" + +"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.20.0", "@babel/compat-data@^7.20.1": + version "7.20.1" + resolved "https://registry.npmmirror.com/@babel/compat-data/-/compat-data-7.20.1.tgz" + integrity sha512-EWZ4mE2diW3QALKvDMiXnbZpRvlj+nayZ112nK93SnhqOtpdsbVD4W+2tEoT3YNBAG9RBR0ISY758ZkOgsn6pQ== + +"@babel/core@7.12.9": + version "7.12.9" + resolved 
"https://registry.npmmirror.com/@babel/core/-/core-7.12.9.tgz" + integrity sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/generator" "^7.12.5" + "@babel/helper-module-transforms" "^7.12.1" + "@babel/helpers" "^7.12.5" + "@babel/parser" "^7.12.7" + "@babel/template" "^7.12.7" + "@babel/traverse" "^7.12.9" + "@babel/types" "^7.12.7" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.1" + json5 "^2.1.2" + lodash "^4.17.19" + resolve "^1.3.2" + semver "^5.4.1" + source-map "^0.5.0" + +"@babel/core@^7.12.16", "@babel/core@^7.12.3", "@babel/core@^7.19.6": + version "7.20.2" + resolved "https://registry.npmmirror.com/@babel/core/-/core-7.20.2.tgz" + integrity sha512-w7DbG8DtMrJcFOi4VrLm+8QM4az8Mo+PuLBKLp2zrYRCow8W/f9xiXm5sN53C8HksCyDQwCKha9JiDoIyPjT2g== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.20.2" + "@babel/helper-compilation-targets" "^7.20.0" + "@babel/helper-module-transforms" "^7.20.2" + "@babel/helpers" "^7.20.1" + "@babel/parser" "^7.20.2" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.20.1" + "@babel/types" "^7.20.2" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/generator@^7.12.15", "@babel/generator@^7.12.5", "@babel/generator@^7.20.2", "@babel/generator@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.23.0.tgz" + integrity sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g== + dependencies: + "@babel/types" "^7.23.0" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" + +"@babel/helper-annotate-as-pure@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz" + integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-builder-binary-assignment-operator-visitor@^7.18.6": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz" + integrity sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw== + dependencies: + "@babel/helper-explode-assignable-expression" "^7.18.6" + "@babel/types" "^7.18.9" + +"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.18.9", "@babel/helper-compilation-targets@^7.20.0": + version "7.20.0" + resolved "https://registry.npmmirror.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.20.0.tgz" + integrity sha512-0jp//vDGp9e8hZzBc6N/KwA5ZK3Wsm/pfm4CrY7vzegkVxc65SgSn6wYOnwHe9Js9HRQ1YTCKLGPzDtaS3RoLQ== + dependencies: + "@babel/compat-data" "^7.20.0" + "@babel/helper-validator-option" "^7.18.6" + browserslist "^4.21.3" + semver "^6.3.0" + +"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.20.2": + version "7.20.2" + resolved "https://registry.npmmirror.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.20.2.tgz" + integrity sha512-k22GoYRAHPYr9I+Gvy2ZQlAe5mGy8BqWst2wRt8cwIufWTxrsVshhIBvYNqC80N0GSFWTsqRVexOtfzlgOEDvA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + 
"@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-member-expression-to-functions" "^7.18.9" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-replace-supers" "^7.19.1" + "@babel/helper-split-export-declaration" "^7.18.6" + +"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.19.0": + version "7.19.0" + resolved "https://registry.npmmirror.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.19.0.tgz" + integrity sha512-htnV+mHX32DF81amCDrwIDr8nrp1PTm+3wfBN9/v8QJOLEioOCOG7qNyq0nHeFiWbT3Eb7gsPwEmV64UCQ1jzw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + regexpu-core "^5.1.0" + +"@babel/helper-define-polyfill-provider@^0.3.3": + version "0.3.3" + resolved "https://registry.npmmirror.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz" + integrity sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww== + dependencies: + "@babel/helper-compilation-targets" "^7.17.7" + "@babel/helper-plugin-utils" "^7.16.7" + debug "^4.1.1" + lodash.debounce "^4.0.8" + resolve "^1.14.2" + semver "^6.1.2" + +"@babel/helper-environment-visitor@^7.18.9", "@babel/helper-environment-visitor@^7.22.20": + version "7.22.20" + resolved "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz" + integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== + +"@babel/helper-explode-assignable-expression@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz" + integrity sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-function-name@^7.18.9", "@babel/helper-function-name@^7.19.0", "@babel/helper-function-name@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz" + integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== + dependencies: + "@babel/template" "^7.22.15" + "@babel/types" "^7.23.0" + +"@babel/helper-hoist-variables@^7.18.6", "@babel/helper-hoist-variables@^7.22.5": + version "7.22.5" + resolved "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz" + integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-member-expression-to-functions@^7.18.9": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.9.tgz" + integrity sha512-RxifAh2ZoVU67PyKIO4AMi1wTenGfMR/O/ae0CCRqwgBAt5v7xjdtRw7UoSbsreKrQn5t7r89eruK/9JjYHuDg== + dependencies: + "@babel/types" "^7.18.9" + +"@babel/helper-module-imports@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz" + integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-transforms@^7.12.1", "@babel/helper-module-transforms@^7.18.6", 
"@babel/helper-module-transforms@^7.19.6", "@babel/helper-module-transforms@^7.20.2": + version "7.20.2" + resolved "https://registry.npmmirror.com/@babel/helper-module-transforms/-/helper-module-transforms-7.20.2.tgz" + integrity sha512-zvBKyJXRbmK07XhMuujYoJ48B5yvvmM6+wcpv6Ivj4Yg6qO7NOZOSnvZN9CRl1zz1Z4cKf8YejmCMh8clOoOeA== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.20.2" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.19.1" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.20.1" + "@babel/types" "^7.20.2" + +"@babel/helper-optimise-call-expression@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz" + integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-plugin-utils@7.10.4": + version "7.10.4" + resolved "https://registry.npmmirror.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz" + integrity sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg== + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.20.2" + resolved "https://registry.npmmirror.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.20.2.tgz" + integrity sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ== + +"@babel/helper-remap-async-to-generator@^7.18.6", "@babel/helper-remap-async-to-generator@^7.18.9": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz" + integrity sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-wrap-function" "^7.18.9" + "@babel/types" "^7.18.9" + +"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.19.1": + version "7.19.1" + resolved "https://registry.npmmirror.com/@babel/helper-replace-supers/-/helper-replace-supers-7.19.1.tgz" + integrity sha512-T7ahH7wV0Hfs46SFh5Jz3s0B6+o8g3c+7TMxu7xKfmHikg7EAZ3I2Qk9LFhjxXq8sL7UkP5JflezNwoZa8WvWw== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-member-expression-to-functions" "^7.18.9" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/traverse" "^7.19.1" + "@babel/types" "^7.19.0" + +"@babel/helper-simple-access@^7.19.4", "@babel/helper-simple-access@^7.20.2": + version "7.20.2" + resolved "https://registry.npmmirror.com/@babel/helper-simple-access/-/helper-simple-access-7.20.2.tgz" + integrity sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA== + dependencies: + "@babel/types" "^7.20.2" + +"@babel/helper-skip-transparent-expression-wrappers@^7.18.9": + version "7.20.0" + resolved 
"https://registry.npmmirror.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.20.0.tgz" + integrity sha512-5y1JYeNKfvnT8sZcK9DVRtpTbGiomYIHviSP3OQWmDPU3DeH4a1ZlT/N2lyQ5P8egjcRaT/Y9aNqUxK0WsnIIg== + dependencies: + "@babel/types" "^7.20.0" + +"@babel/helper-split-export-declaration@^7.18.6", "@babel/helper-split-export-declaration@^7.22.6": + version "7.22.6" + resolved "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz" + integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-string-parser@^7.22.5": + version "7.22.5" + resolved "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz" + integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== + +"@babel/helper-validator-identifier@^7.19.1", "@babel/helper-validator-identifier@^7.22.20": + version "7.22.20" + resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz" + integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== + +"@babel/helper-validator-option@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz" + integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== + +"@babel/helper-wrap-function@^7.18.9": + version "7.19.0" + resolved "https://registry.npmmirror.com/@babel/helper-wrap-function/-/helper-wrap-function-7.19.0.tgz" + integrity sha512-txX8aN8CZyYGTwcLhlk87KRqncAzhh5TpQamZUa0/u3an36NtDpUP6bQgBCBcLeBs09R/OwQu3OjK0k/HwfNDg== + dependencies: + "@babel/helper-function-name" "^7.19.0" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/helpers@^7.12.5", "@babel/helpers@^7.20.1": + version "7.20.1" + resolved "https://registry.npmmirror.com/@babel/helpers/-/helpers-7.20.1.tgz" + integrity sha512-J77mUVaDTUJFZ5BpP6mMn6OIl3rEWymk2ZxDBQJUG3P+PbmyMcF3bYWvz0ma69Af1oobDqT/iAsvzhB58xhQUg== + dependencies: + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.20.1" + "@babel/types" "^7.20.0" + +"@babel/highlight@^7.22.13": + version "7.22.20" + resolved "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.20.tgz" + integrity sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg== + dependencies: + "@babel/helper-validator-identifier" "^7.22.20" + chalk "^2.4.2" + js-tokens "^4.0.0" + +"@babel/parser@^7.12.16", "@babel/parser@^7.12.7", "@babel/parser@^7.20.2", "@babel/parser@^7.22.15", "@babel/parser@^7.23.0": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.23.0.tgz" + integrity sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw== + +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz" + integrity sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + 
+"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.9": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.9.tgz" + integrity sha512-AHrP9jadvH7qlOj6PINbgSuphjQUAK7AOT7DPjBo9EHoLhQTnnK5u45e1Hd4DbSQEO9nqPWtQ89r+XEOWFScKg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + "@babel/plugin-proposal-optional-chaining" "^7.18.9" + +"@babel/plugin-proposal-async-generator-functions@^7.20.1": + version "7.20.1" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.20.1.tgz" + integrity sha512-Gh5rchzSwE4kC+o/6T8waD0WHEQIsDmjltY8WnWRXHUdH8axZhuH86Ov9M72YhJfDrZseQwuuWaaIT/TmePp3g== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-remap-async-to-generator" "^7.18.9" + "@babel/plugin-syntax-async-generators" "^7.8.4" + +"@babel/plugin-proposal-class-properties@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz" + integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-class-static-block@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz" + integrity sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-proposal-dynamic-import@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz" + integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + +"@babel/plugin-proposal-export-namespace-from@^7.18.9": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz" + integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + +"@babel/plugin-proposal-json-strings@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz" + integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + +"@babel/plugin-proposal-logical-assignment-operators@^7.18.9": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.9.tgz" + integrity 
sha512-128YbMpjCrP35IOExw2Fq+x55LMP42DzhOhX2aNNIdI9avSWl2PI0yuBWarr3RYpZBSPtabfadkH2yeRiMD61Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + +"@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz" + integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + +"@babel/plugin-proposal-numeric-separator@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz" + integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-proposal-object-rest-spread@7.12.1": + version "7.12.1" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.12.1.tgz" + integrity sha512-s6SowJIjzlhx8o7lsFx5zmY4At6CTtDvgNQDdPzkBQucle58A6b/TTeEBYtyDgmcXjUTM+vE8YOGHZzzbc/ioA== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.0" + "@babel/plugin-transform-parameters" "^7.12.1" + +"@babel/plugin-proposal-object-rest-spread@^7.20.2": + version "7.20.2" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.2.tgz" + integrity sha512-Ks6uej9WFK+fvIMesSqbAto5dD8Dz4VuuFvGJFKgIGSkJuRGcrwGECPA1fDgQK3/DbExBJpEkTeYeB8geIFCSQ== + dependencies: + "@babel/compat-data" "^7.20.1" + "@babel/helper-compilation-targets" "^7.20.0" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.20.1" + +"@babel/plugin-proposal-optional-catch-binding@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz" + integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + +"@babel/plugin-proposal-optional-chaining@^7.18.9": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.9.tgz" + integrity sha512-v5nwt4IqBXihxGsW2QmCWMDS3B3bzGIk/EQVZz2ei7f3NJl8NzAJVvUmpDW5q1CRNY+Beb/k58UAH1Km1N411w== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-proposal-private-methods@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz" + integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-private-property-in-object@^7.18.6": + 
version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz" + integrity sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + +"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz" + integrity sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.12.13": + version "7.12.13" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-class-static-block@^7.14.5": + version "7.14.5" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-dynamic-import@^7.8.3": + version "7.8.3" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz" + integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + version "7.8.3" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + +"@babel/plugin-syntax-import-assertions@^7.20.0": + version "7.20.0" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.20.0.tgz" + integrity sha512-IUh1vakzNoWalR8ch/areW7qFopR2AEw03JlG7BbrDqmQ4X3q9uuipQwSGrUn7oGiemKjtSLDhNtQHzMHr1JdQ== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + 
+"@babel/plugin-syntax-jsx@7.12.1": + version "7.12.1" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz" + integrity sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-jsx@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz" + integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4": + version "7.10.4" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.10.4": + version "7.10.4" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@7.8.3", "@babel/plugin-syntax-object-rest-spread@^7.8.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-private-property-in-object@^7.14.5": + version "7.14.5" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-top-level-await@^7.14.5": + version "7.14.5" + resolved 
"https://registry.npmmirror.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.20.0": + version "7.20.0" + resolved "https://registry.npmmirror.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.20.0.tgz" + integrity sha512-rd9TkG+u1CExzS4SM1BlMEhMXwFLKVjOAFFCDx9PbX5ycJWDoWMcwdJH9RhkPu1dOgn5TrxLot/Gx6lWFuAUNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-transform-arrow-functions@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz" + integrity sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-async-to-generator@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz" + integrity sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-remap-async-to-generator" "^7.18.6" + +"@babel/plugin-transform-block-scoped-functions@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz" + integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-block-scoping@^7.20.2": + version "7.20.2" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.20.2.tgz" + integrity sha512-y5V15+04ry69OV2wULmwhEA6jwSWXO1TwAtIwiPXcvHcoOQUqpyMVd2bDsQJMW8AurjulIyUV8kDqtjSwHy1uQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-classes@^7.20.2": + version "7.20.2" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.20.2.tgz" + integrity sha512-9rbPp0lCVVoagvtEyQKSo5L8oo0nQS/iif+lwlAz29MccX2642vWDlSZK+2T2buxbopotId2ld7zZAzRfz9j1g== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-compilation-targets" "^7.20.0" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-replace-supers" "^7.19.1" + "@babel/helper-split-export-declaration" "^7.18.6" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.18.9": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.9.tgz" + integrity sha512-+i0ZU1bCDymKakLxn5srGHrsAPRELC2WIbzwjLhHW9SIE1cPYkLCL0NlnXMZaM1vhfgA2+M7hySk42VBvrkBRw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-destructuring@^7.20.2": + version "7.20.2" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.20.2.tgz" + integrity 
sha512-mENM+ZHrvEgxLTBXUiQ621rRXZes3KWUv6NdQlrnr1TkWVw+hUjQBZuP2X32qKlrlG2BzgR95gkuCRSkJl8vIw== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz" + integrity sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-duplicate-keys@^7.18.9": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz" + integrity sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-exponentiation-operator@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz" + integrity sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw== + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-for-of@^7.18.8": + version "7.18.8" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz" + integrity sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-function-name@^7.18.9": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz" + integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ== + dependencies: + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-literals@^7.18.9": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz" + integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-member-expression-literals@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz" + integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-modules-amd@^7.19.6": + version "7.19.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.19.6.tgz" + integrity sha512-uG3od2mXvAtIFQIh0xrpLH6r5fpSQN04gIVovl+ODLdUMANokxQLZnPBHcjmv3GxRjnqwLuHvppjjcelqUFZvg== + dependencies: + "@babel/helper-module-transforms" "^7.19.6" + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-transform-modules-commonjs@^7.19.6": + version "7.19.6" + resolved 
"https://registry.npmmirror.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.19.6.tgz" + integrity sha512-8PIa1ym4XRTKuSsOUXqDG0YaOlEuTVvHMe5JCfgBMOtHvJKw/4NGovEGN33viISshG/rZNVrACiBmPQLvWN8xQ== + dependencies: + "@babel/helper-module-transforms" "^7.19.6" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-simple-access" "^7.19.4" + +"@babel/plugin-transform-modules-systemjs@^7.19.6": + version "7.19.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.19.6.tgz" + integrity sha512-fqGLBepcc3kErfR9R3DnVpURmckXP7gj7bAlrTQyBxrigFqszZCkFkcoxzCp2v32XmwXLvbw+8Yq9/b+QqksjQ== + dependencies: + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-module-transforms" "^7.19.6" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-validator-identifier" "^7.19.1" + +"@babel/plugin-transform-modules-umd@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz" + integrity sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.19.1": + version "7.19.1" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.19.1.tgz" + integrity sha512-oWk9l9WItWBQYS4FgXD4Uyy5kq898lvkXpXQxoJEY1RnvPk4R/Dvu2ebXU9q8lP+rlMwUQTFf2Ok6d78ODa0kw== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-transform-new-target@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz" + integrity sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-object-super@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz" + integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-replace-supers" "^7.18.6" + +"@babel/plugin-transform-parameters@^7.12.1", "@babel/plugin-transform-parameters@^7.20.1": + version "7.20.1" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.20.1.tgz" + integrity sha512-nDvKLrAvl+kf6BOy1UJ3MGwzzfTMgppxwiD2Jb4LO3xjYyZq30oQzDNJbCQpMdG9+j2IXHoiMrw5Cm/L6ZoxXQ== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-transform-property-literals@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz" + integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-constant-elements@^7.12.1", "@babel/plugin-transform-react-constant-elements@^7.18.12": + version "7.20.2" + resolved 
"https://registry.npmmirror.com/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.20.2.tgz" + integrity sha512-KS/G8YI8uwMGKErLFOHS/ekhqdHhpEloxs43NecQHVgo2QuQSyJhGIY1fL8UGl9wy5ItVwwoUL4YxVqsplGq2g== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-react-display-name@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz" + integrity sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-jsx-development@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz" + integrity sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA== + dependencies: + "@babel/plugin-transform-react-jsx" "^7.18.6" + +"@babel/plugin-transform-react-jsx@^7.18.6": + version "7.19.0" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.19.0.tgz" + integrity sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/plugin-syntax-jsx" "^7.18.6" + "@babel/types" "^7.19.0" + +"@babel/plugin-transform-react-pure-annotations@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz" + integrity sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-regenerator@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz" + integrity sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + regenerator-transform "^0.15.0" + +"@babel/plugin-transform-reserved-words@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz" + integrity sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-runtime@^7.15.0": + version "7.19.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.19.6.tgz" + integrity sha512-PRH37lz4JU156lYFW1p8OxE5i7d6Sl/zV58ooyr+q1J1lnQPyg5tIiXlIwNVhJaY4W3TmOtdc8jqdXQcB1v5Yw== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.19.0" + babel-plugin-polyfill-corejs2 "^0.3.3" + babel-plugin-polyfill-corejs3 "^0.6.0" + babel-plugin-polyfill-regenerator "^0.4.1" + semver "^6.3.0" + +"@babel/plugin-transform-shorthand-properties@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz" + integrity 
sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-spread@^7.19.0": + version "7.19.0" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.19.0.tgz" + integrity sha512-RsuMk7j6n+r752EtzyScnWkQyuJdli6LdO5Klv8Yx0OfPVTcQkIUfS8clx5e9yHXzlnhOZF3CbQ8C2uP5j074w== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + +"@babel/plugin-transform-sticky-regex@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz" + integrity sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-template-literals@^7.18.9": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz" + integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-typeof-symbol@^7.18.9": + version "7.18.9" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz" + integrity sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-typescript@^7.18.6": + version "7.20.2" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.20.2.tgz" + integrity sha512-jvS+ngBfrnTUBfOQq8NfGnSbF9BrqlR6hjJ2yVxMkmO5nL/cdifNbI30EfjRlN4g5wYWNnMPyj5Sa6R1pbLeag== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.20.2" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-typescript" "^7.20.0" + +"@babel/plugin-transform-unicode-escapes@^7.18.10": + version "7.18.10" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz" + integrity sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-unicode-regex@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz" + integrity sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/preset-env@^7.12.1", "@babel/preset-env@^7.15.6", "@babel/preset-env@^7.19.4": + version "7.20.2" + resolved "https://registry.npmmirror.com/@babel/preset-env/-/preset-env-7.20.2.tgz" + integrity sha512-1G0efQEWR1EHkKvKHqbG+IN/QdgwfByUpM5V5QroDzGV2t3S/WXNQd693cHiHTlCFMpr9B6FkPFXDA2lQcKoDg== + dependencies: + "@babel/compat-data" "^7.20.1" + "@babel/helper-compilation-targets" "^7.20.0" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" 
"^7.18.9" + "@babel/plugin-proposal-async-generator-functions" "^7.20.1" + "@babel/plugin-proposal-class-properties" "^7.18.6" + "@babel/plugin-proposal-class-static-block" "^7.18.6" + "@babel/plugin-proposal-dynamic-import" "^7.18.6" + "@babel/plugin-proposal-export-namespace-from" "^7.18.9" + "@babel/plugin-proposal-json-strings" "^7.18.6" + "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" + "@babel/plugin-proposal-numeric-separator" "^7.18.6" + "@babel/plugin-proposal-object-rest-spread" "^7.20.2" + "@babel/plugin-proposal-optional-catch-binding" "^7.18.6" + "@babel/plugin-proposal-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-private-methods" "^7.18.6" + "@babel/plugin-proposal-private-property-in-object" "^7.18.6" + "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-import-assertions" "^7.20.0" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-transform-arrow-functions" "^7.18.6" + "@babel/plugin-transform-async-to-generator" "^7.18.6" + "@babel/plugin-transform-block-scoped-functions" "^7.18.6" + "@babel/plugin-transform-block-scoping" "^7.20.2" + "@babel/plugin-transform-classes" "^7.20.2" + "@babel/plugin-transform-computed-properties" "^7.18.9" + "@babel/plugin-transform-destructuring" "^7.20.2" + "@babel/plugin-transform-dotall-regex" "^7.18.6" + "@babel/plugin-transform-duplicate-keys" "^7.18.9" + "@babel/plugin-transform-exponentiation-operator" "^7.18.6" + "@babel/plugin-transform-for-of" "^7.18.8" + "@babel/plugin-transform-function-name" "^7.18.9" + "@babel/plugin-transform-literals" "^7.18.9" + "@babel/plugin-transform-member-expression-literals" "^7.18.6" + "@babel/plugin-transform-modules-amd" "^7.19.6" + "@babel/plugin-transform-modules-commonjs" "^7.19.6" + "@babel/plugin-transform-modules-systemjs" "^7.19.6" + "@babel/plugin-transform-modules-umd" "^7.18.6" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.19.1" + "@babel/plugin-transform-new-target" "^7.18.6" + "@babel/plugin-transform-object-super" "^7.18.6" + "@babel/plugin-transform-parameters" "^7.20.1" + "@babel/plugin-transform-property-literals" "^7.18.6" + "@babel/plugin-transform-regenerator" "^7.18.6" + "@babel/plugin-transform-reserved-words" "^7.18.6" + "@babel/plugin-transform-shorthand-properties" "^7.18.6" + "@babel/plugin-transform-spread" "^7.19.0" + "@babel/plugin-transform-sticky-regex" "^7.18.6" + "@babel/plugin-transform-template-literals" "^7.18.9" + "@babel/plugin-transform-typeof-symbol" "^7.18.9" + "@babel/plugin-transform-unicode-escapes" "^7.18.10" + "@babel/plugin-transform-unicode-regex" "^7.18.6" + "@babel/preset-modules" "^0.1.5" + "@babel/types" "^7.20.2" + babel-plugin-polyfill-corejs2 "^0.3.3" + babel-plugin-polyfill-corejs3 "^0.6.0" + 
babel-plugin-polyfill-regenerator "^0.4.1" + core-js-compat "^3.25.1" + semver "^6.3.0" + +"@babel/preset-modules@^0.1.5": + version "0.1.5" + resolved "https://registry.npmmirror.com/@babel/preset-modules/-/preset-modules-0.1.5.tgz" + integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" + "@babel/plugin-transform-dotall-regex" "^7.4.4" + "@babel/types" "^7.4.4" + esutils "^2.0.2" + +"@babel/preset-react@^7.12.13", "@babel/preset-react@^7.12.5", "@babel/preset-react@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/preset-react/-/preset-react-7.18.6.tgz" + integrity sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-transform-react-display-name" "^7.18.6" + "@babel/plugin-transform-react-jsx" "^7.18.6" + "@babel/plugin-transform-react-jsx-development" "^7.18.6" + "@babel/plugin-transform-react-pure-annotations" "^7.18.6" + +"@babel/preset-typescript@^7.12.16", "@babel/preset-typescript@^7.18.6": + version "7.18.6" + resolved "https://registry.npmmirror.com/@babel/preset-typescript/-/preset-typescript-7.18.6.tgz" + integrity sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-transform-typescript" "^7.18.6" + +"@babel/runtime-corejs3@^7.15.4": + version "7.20.1" + resolved "https://registry.npmmirror.com/@babel/runtime-corejs3/-/runtime-corejs3-7.20.1.tgz" + integrity sha512-CGulbEDcg/ND1Im7fUNRZdGXmX2MTWVVZacQi/6DiKE5HNwZ3aVTm5PV4lO8HHz0B2h8WQyvKKjbX5XgTtydsg== + dependencies: + core-js-pure "^3.25.1" + regenerator-runtime "^0.13.10" + +"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.12.13", "@babel/runtime@^7.15.4", "@babel/runtime@^7.8.4": + version "7.20.1" + resolved "https://registry.npmmirror.com/@babel/runtime/-/runtime-7.20.1.tgz" + integrity sha512-mrzLkl6U9YLF8qpqI7TB82PESyEGjm/0Ly91jG575eVxMMlb8fYfOXFZIJ8XfLrJZQbm7dlKry2bJmXBUEkdFg== + dependencies: + regenerator-runtime "^0.13.10" + +"@babel/template@^7.12.7", "@babel/template@^7.18.10", "@babel/template@^7.22.15": + version "7.22.15" + resolved "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz" + integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/parser" "^7.22.15" + "@babel/types" "^7.22.15" + +"@babel/traverse@^7.12.13", "@babel/traverse@^7.12.9", "@babel/traverse@^7.19.0", "@babel/traverse@^7.19.1", "@babel/traverse@^7.20.1": + version "7.23.2" + resolved "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.2.tgz" + integrity sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/generator" "^7.23.0" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" + "@babel/helper-hoist-variables" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/parser" "^7.23.0" + "@babel/types" "^7.23.0" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.12.6", "@babel/types@^7.12.7", 
"@babel/types@^7.18.6", "@babel/types@^7.18.9", "@babel/types@^7.19.0", "@babel/types@^7.20.0", "@babel/types@^7.20.2", "@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.4.4": + version "7.23.0" + resolved "https://registry.npmjs.org/@babel/types/-/types-7.23.0.tgz" + integrity sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg== + dependencies: + "@babel/helper-string-parser" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" + +"@docsearch/css@3.3.0": + version "3.3.0" + resolved "https://registry.npmmirror.com/@docsearch/css/-/css-3.3.0.tgz" + integrity sha512-rODCdDtGyudLj+Va8b6w6Y85KE85bXRsps/R4Yjwt5vueXKXZQKYw0aA9knxLBT6a/bI/GMrAcmCR75KYOM6hg== + +"@docsearch/react@^3.0.0-alpha.39": + version "3.3.0" + resolved "https://registry.npmmirror.com/@docsearch/react/-/react-3.3.0.tgz" + integrity sha512-fhS5adZkae2SSdMYEMVg6pxI5a/cE+tW16ki1V0/ur4Fdok3hBRkmN/H8VvlXnxzggkQIIRIVvYPn00JPjen3A== + dependencies: + "@algolia/autocomplete-core" "1.7.2" + "@algolia/autocomplete-preset-algolia" "1.7.2" + "@docsearch/css" "3.3.0" + algoliasearch "^4.0.0" + +"@docusaurus/core@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/core/-/core-0.0.0-4193.tgz" + integrity sha512-mC3YLaFgK8JqW3E7b2lCtIlQOVnzqOP0FwtI0+ilkx9v9F+DfgNQzJJ7Kk2RIXeDKu0e2AnjS7YBmRUwpgHRRg== + dependencies: + "@babel/core" "^7.12.16" + "@babel/generator" "^7.12.15" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-transform-runtime" "^7.15.0" + "@babel/preset-env" "^7.15.6" + "@babel/preset-react" "^7.12.13" + "@babel/preset-typescript" "^7.12.16" + "@babel/runtime" "^7.15.4" + "@babel/runtime-corejs3" "^7.15.4" + "@babel/traverse" "^7.12.13" + "@docusaurus/cssnano-preset" "0.0.0-4193" + "@docusaurus/react-loadable" "5.5.2" + "@docusaurus/types" "0.0.0-4193" + "@docusaurus/utils" "0.0.0-4193" + "@docusaurus/utils-common" "0.0.0-4193" + "@docusaurus/utils-validation" "0.0.0-4193" + "@slorber/static-site-generator-webpack-plugin" "^4.0.0" + "@svgr/webpack" "^5.5.0" + autoprefixer "^10.3.5" + babel-loader "^8.2.2" + babel-plugin-dynamic-import-node "2.3.0" + boxen "^5.0.1" + chalk "^4.1.2" + chokidar "^3.5.2" + clean-css "^5.1.5" + commander "^5.1.0" + copy-webpack-plugin "^9.0.1" + core-js "^3.18.0" + css-loader "^5.1.1" + css-minimizer-webpack-plugin "^3.0.2" + cssnano "^5.0.8" + del "^6.0.0" + detect-port "^1.3.0" + escape-html "^1.0.3" + eta "^1.12.3" + file-loader "^6.2.0" + fs-extra "^10.0.0" + github-slugger "^1.4.0" + globby "^11.0.2" + html-minifier-terser "^6.0.2" + html-tags "^3.1.0" + html-webpack-plugin "^5.4.0" + import-fresh "^3.3.0" + is-root "^2.1.0" + leven "^3.1.0" + lodash "^4.17.20" + mini-css-extract-plugin "^1.6.0" + nprogress "^0.2.0" + postcss "^8.3.7" + postcss-loader "^6.1.1" + prompts "^2.4.1" + react-dev-utils "12.0.0-next.47" + react-error-overlay "^6.0.9" + react-helmet "^6.1.0" + react-loadable "npm:@docusaurus/react-loadable@5.5.2" + react-loadable-ssr-addon-v5-slorber "^1.0.1" + react-router "^5.2.0" + react-router-config "^5.1.1" + react-router-dom "^5.2.0" + remark-admonitions "^1.2.1" + resolve-pathname "^3.0.0" + rtl-detect "^1.0.4" + semver "^7.3.4" + serve-handler "^6.1.3" + shelljs "^0.8.4" + std-env "^2.2.1" + strip-ansi "^6.0.0" + terser-webpack-plugin "^5.2.4" + tslib "^2.3.1" + update-notifier "^5.1.0" + url-loader "^4.1.1" + wait-on "^6.0.0" + webpack "^5.61.0" + webpack-bundle-analyzer "^4.4.2" + webpack-dev-server 
"^4.4.0" + webpack-merge "^5.8.0" + webpackbar "^5.0.0-3" + +"@docusaurus/cssnano-preset@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/cssnano-preset/-/cssnano-preset-0.0.0-4193.tgz" + integrity sha512-aB8JNvHU/BW6YnBk7p9HljTOp0DIIVbeCa4WCS3Brp8U6MfM1XlMLqcBUWcCXl4dnvNGhIPKdCDToTdKm0M2MA== + dependencies: + cssnano-preset-advanced "^5.1.4" + postcss "^8.3.7" + postcss-sort-media-queries "^4.1.0" + +"@docusaurus/logger@2.2.0": + version "2.2.0" + resolved "https://registry.npmmirror.com/@docusaurus/logger/-/logger-2.2.0.tgz" + integrity sha512-DF3j1cA5y2nNsu/vk8AG7xwpZu6f5MKkPPMaaIbgXLnWGfm6+wkOeW7kNrxnM95YOhKUkJUophX69nGUnLsm0A== + dependencies: + chalk "^4.1.2" + tslib "^2.4.0" + +"@docusaurus/mdx-loader@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/mdx-loader/-/mdx-loader-0.0.0-4193.tgz" + integrity sha512-EN/Q/GDMirYsxdQ+ElhqWCfKQer2MQNrmKUYc+e+4lZeAUDT9E6M7HinGoImtYQbZpN5uSjDnzTYP793DGsMjQ== + dependencies: + "@babel/parser" "^7.12.16" + "@babel/traverse" "^7.12.13" + "@docusaurus/core" "0.0.0-4193" + "@docusaurus/utils" "0.0.0-4193" + "@mdx-js/mdx" "^1.6.21" + "@mdx-js/react" "^1.6.21" + chalk "^4.1.2" + escape-html "^1.0.3" + file-loader "^6.2.0" + fs-extra "^10.0.0" + github-slugger "^1.4.0" + gray-matter "^4.0.3" + mdast-util-to-string "^2.0.0" + remark-emoji "^2.1.0" + stringify-object "^3.3.0" + unist-util-visit "^2.0.2" + url-loader "^4.1.1" + webpack "^5.61.0" + +"@docusaurus/plugin-content-blog@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/plugin-content-blog/-/plugin-content-blog-0.0.0-4193.tgz" + integrity sha512-5GLRK3ftr1QmcBEQokn7frauAh6g9Jsc3tQUeMUR2iWZk7C96rAfVMrY3W5Jv+6gfgGzbTG+AKAUD9e3gcGHfg== + dependencies: + "@docusaurus/core" "0.0.0-4193" + "@docusaurus/mdx-loader" "0.0.0-4193" + "@docusaurus/types" "0.0.0-4193" + "@docusaurus/utils" "0.0.0-4193" + "@docusaurus/utils-validation" "0.0.0-4193" + chalk "^4.1.2" + escape-string-regexp "^4.0.0" + feed "^4.2.2" + fs-extra "^10.0.0" + globby "^11.0.2" + js-yaml "^4.0.0" + loader-utils "^2.0.0" + lodash "^4.17.20" + reading-time "^1.5.0" + remark-admonitions "^1.2.1" + tslib "^2.3.1" + utility-types "^3.10.0" + webpack "^5.61.0" + +"@docusaurus/plugin-content-docs@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/plugin-content-docs/-/plugin-content-docs-0.0.0-4193.tgz" + integrity sha512-UDccCvOk/vIKtbLwkWz8oI8SWrJXmKrkI1IKO038EfcJT2OAA/Gy96jar659Z2c1bqS9QurJ5M2RHA3JBS6xrg== + dependencies: + "@docusaurus/core" "0.0.0-4193" + "@docusaurus/mdx-loader" "0.0.0-4193" + "@docusaurus/types" "0.0.0-4193" + "@docusaurus/utils" "0.0.0-4193" + "@docusaurus/utils-validation" "0.0.0-4193" + chalk "^4.1.2" + combine-promises "^1.1.0" + escape-string-regexp "^4.0.0" + fs-extra "^10.0.0" + globby "^11.0.2" + import-fresh "^3.2.2" + js-yaml "^4.0.0" + loader-utils "^2.0.0" + lodash "^4.17.20" + remark-admonitions "^1.2.1" + shelljs "^0.8.4" + tslib "^2.3.1" + utility-types "^3.10.0" + webpack "^5.61.0" + +"@docusaurus/plugin-content-pages@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/plugin-content-pages/-/plugin-content-pages-0.0.0-4193.tgz" + integrity sha512-E/vP3vGGEWtswFbUco+5elM5O/vch6vB1Lq5Zwq6ybtumalF1kXa67ZN8dlMspI4nhAPv45ZTZPT8UlRLrlqaQ== + dependencies: + "@docusaurus/core" "0.0.0-4193" + "@docusaurus/mdx-loader" "0.0.0-4193" + "@docusaurus/types" "0.0.0-4193" + "@docusaurus/utils" "0.0.0-4193" + 
"@docusaurus/utils-validation" "0.0.0-4193" + globby "^11.0.2" + lodash "^4.17.20" + remark-admonitions "^1.2.1" + tslib "^2.3.1" + webpack "^5.61.0" + +"@docusaurus/plugin-debug@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/plugin-debug/-/plugin-debug-0.0.0-4193.tgz" + integrity sha512-3+rkYliCSVKNaRR/AXatbk4xpvIax7VtEtjGBTz7T6lxo8CujqOcw+j8R2HxlQSC0PgmkKnTLMDP1umZuqnIHg== + dependencies: + "@docusaurus/core" "0.0.0-4193" + "@docusaurus/types" "0.0.0-4193" + "@docusaurus/utils" "0.0.0-4193" + fs-extra "^10.0.0" + react-json-view "^1.21.3" + tslib "^2.3.1" + +"@docusaurus/plugin-google-analytics@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-0.0.0-4193.tgz" + integrity sha512-wWZbr3nSFh7hEWTCmef3eih5S1s4z5+jJTCCZsq+gkHK4b9V9u81BvESB4N7T6dxt9vNEKIoYEb58iuYzUkLbw== + dependencies: + "@docusaurus/core" "0.0.0-4193" + "@docusaurus/utils-validation" "0.0.0-4193" + +"@docusaurus/plugin-google-gtag@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-0.0.0-4193.tgz" + integrity sha512-k7JYw7PvfarFJLDUcSOSzb3MTOL5/yi6ZIs3mTBpAaC7U+MTGVLVejVuoMKAprbtAg2RUZq5unGnBoji7xaDyQ== + dependencies: + "@docusaurus/core" "0.0.0-4193" + "@docusaurus/utils-validation" "0.0.0-4193" + +"@docusaurus/plugin-sitemap@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/plugin-sitemap/-/plugin-sitemap-0.0.0-4193.tgz" + integrity sha512-819ln3gj/ozT/BioQl2OrOcRvj5hvxta7h0m7TW/UbAZVx6X9QNAN9TZd49dyuTF/7y3JswU/7BxbeeOT6c/4Q== + dependencies: + "@docusaurus/core" "0.0.0-4193" + "@docusaurus/types" "0.0.0-4193" + "@docusaurus/utils" "0.0.0-4193" + "@docusaurus/utils-common" "0.0.0-4193" + "@docusaurus/utils-validation" "0.0.0-4193" + fs-extra "^10.0.0" + sitemap "^7.0.0" + tslib "^2.3.1" + +"@docusaurus/preset-classic@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/preset-classic/-/preset-classic-0.0.0-4193.tgz" + integrity sha512-R4hGzXQvCXVeH6+AbgnXBNoNagZ9Wc3WikS/MUAuu5F1vY44U2nFZo6rCDeJgupAGdqjIicA10acSUVTMslbXw== + dependencies: + "@docusaurus/core" "0.0.0-4193" + "@docusaurus/plugin-content-blog" "0.0.0-4193" + "@docusaurus/plugin-content-docs" "0.0.0-4193" + "@docusaurus/plugin-content-pages" "0.0.0-4193" + "@docusaurus/plugin-debug" "0.0.0-4193" + "@docusaurus/plugin-google-analytics" "0.0.0-4193" + "@docusaurus/plugin-google-gtag" "0.0.0-4193" + "@docusaurus/plugin-sitemap" "0.0.0-4193" + "@docusaurus/theme-classic" "0.0.0-4193" + "@docusaurus/theme-search-algolia" "0.0.0-4193" + +"@docusaurus/react-loadable@5.5.2": + version "5.5.2" + resolved "https://registry.npmmirror.com/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz" + integrity sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ== + dependencies: + "@types/react" "*" + prop-types "^15.6.2" + +"@docusaurus/theme-classic@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/theme-classic/-/theme-classic-0.0.0-4193.tgz" + integrity sha512-jPu5EkknsnqAyFNV5AYtM9+GOfZ3l3rVJYiGRNy/v/VvmBTqoWUVXm8Xylf79yMFEOjlGgFObfR58IPThZvvXw== + dependencies: + "@docusaurus/core" "0.0.0-4193" + "@docusaurus/plugin-content-blog" "0.0.0-4193" + "@docusaurus/plugin-content-docs" "0.0.0-4193" + "@docusaurus/plugin-content-pages" "0.0.0-4193" + "@docusaurus/theme-common" "0.0.0-4193" + "@docusaurus/types" 
"0.0.0-4193" + "@docusaurus/utils" "0.0.0-4193" + "@docusaurus/utils-validation" "0.0.0-4193" + "@mdx-js/mdx" "^1.6.21" + "@mdx-js/react" "^1.6.21" + chalk "^4.1.2" + clsx "^1.1.1" + copy-text-to-clipboard "^3.0.1" + fs-extra "^10.0.0" + globby "^11.0.2" + infima "0.2.0-alpha.34" + lodash "^4.17.20" + postcss "^8.3.7" + prism-react-renderer "^1.2.1" + prismjs "^1.23.0" + prop-types "^15.7.2" + react-router-dom "^5.2.0" + rtlcss "^3.3.0" + +"@docusaurus/theme-common@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/theme-common/-/theme-common-0.0.0-4193.tgz" + integrity sha512-6yJEVlKrm9KIgMo1pmsAA5EL7FcrpeMPfkwhChAH0W5s6i4aQENrefQcEXBBBIv1KbNx4uNUG779plvnoSfNWA== + dependencies: + "@docusaurus/core" "0.0.0-4193" + "@docusaurus/plugin-content-blog" "0.0.0-4193" + "@docusaurus/plugin-content-docs" "0.0.0-4193" + "@docusaurus/plugin-content-pages" "0.0.0-4193" + "@docusaurus/types" "0.0.0-4193" + clsx "^1.1.1" + fs-extra "^10.0.0" + parse-numeric-range "^1.3.0" + tslib "^2.3.1" + utility-types "^3.10.0" + +"@docusaurus/theme-search-algolia@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/theme-search-algolia/-/theme-search-algolia-0.0.0-4193.tgz" + integrity sha512-we2Z7nhZy5bgoYqF8ZXJO2jt8rBsqnDgXoRPkzoaacQww3CO3+Ez9Q+Dcpxy7lunNY3fShv+9SC13K9yClCUEQ== + dependencies: + "@docsearch/react" "^3.0.0-alpha.39" + "@docusaurus/core" "0.0.0-4193" + "@docusaurus/theme-common" "0.0.0-4193" + "@docusaurus/utils" "0.0.0-4193" + "@docusaurus/utils-validation" "0.0.0-4193" + algoliasearch "^4.10.5" + algoliasearch-helper "^3.5.5" + clsx "^1.1.1" + eta "^1.12.3" + lodash "^4.17.20" + +"@docusaurus/types@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/types/-/types-0.0.0-4193.tgz" + integrity sha512-2y+D3yYzEbBAmK74Me4g3pVe1sSRXZDDyKzf/Ojb729F7lYx9dvUTj0I/YlNDcPg5FUKCorfnV+3RfsyDb8lKA== + dependencies: + commander "^5.1.0" + joi "^17.4.2" + querystring "0.2.0" + utility-types "^3.10.0" + webpack "^5.61.0" + webpack-merge "^5.8.0" + +"@docusaurus/utils-common@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/utils-common/-/utils-common-0.0.0-4193.tgz" + integrity sha512-31kHFbhubA8cKZIjztNBsIUBbD+gHInHfVuvzBdcOnZgFfTdL9sUrROmjGnAAopAoJ/YyO5bsu5GGnrn/hexDA== + dependencies: + "@docusaurus/types" "0.0.0-4193" + tslib "^2.3.1" + +"@docusaurus/utils-validation@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/utils-validation/-/utils-validation-0.0.0-4193.tgz" + integrity sha512-ppVmx3KOHKyxta51O76VAVLttR46ItigEVpbaw5nfxs8muGKXmddWQ2lA78ga1zTDzwLT/7d18kngn46Qva+dw== + dependencies: + "@docusaurus/utils" "0.0.0-4193" + chalk "^4.1.2" + joi "^17.4.2" + tslib "^2.3.1" + +"@docusaurus/utils-validation@^2.0.0-beta.4": + version "2.2.0" + resolved "https://registry.npmmirror.com/@docusaurus/utils-validation/-/utils-validation-2.2.0.tgz" + integrity sha512-I1hcsG3yoCkasOL5qQAYAfnmVoLei7apugT6m4crQjmDGxq+UkiRrq55UqmDDyZlac/6ax/JC0p+usZ6W4nVyg== + dependencies: + "@docusaurus/logger" "2.2.0" + "@docusaurus/utils" "2.2.0" + joi "^17.6.0" + js-yaml "^4.1.0" + tslib "^2.4.0" + +"@docusaurus/utils@0.0.0-4193": + version "0.0.0-4193" + resolved "https://registry.npmmirror.com/@docusaurus/utils/-/utils-0.0.0-4193.tgz" + integrity sha512-c+1c735JzKqE2pRAkHnKyz81a6RE5HcVL3J1tpw9ACKjMyDq1qv3XeYa/ZKv/09qi/FI2QiZ32eiYHMXNZE1Sw== + dependencies: + "@docusaurus/types" "0.0.0-4193" + "@mdx-js/runtime" "^1.6.22" + 
"@types/github-slugger" "^1.3.0" + chalk "^4.1.2" + escape-string-regexp "^4.0.0" + fs-extra "^10.0.0" + globby "^11.0.4" + gray-matter "^4.0.3" + lodash "^4.17.20" + micromatch "^4.0.4" + remark-mdx-remove-exports "^1.6.22" + remark-mdx-remove-imports "^1.6.22" + resolve-pathname "^3.0.0" + tslib "^2.3.1" + +"@docusaurus/utils@2.2.0", "@docusaurus/utils@^2.0.0-beta.4": + version "2.2.0" + resolved "https://registry.npmmirror.com/@docusaurus/utils/-/utils-2.2.0.tgz" + integrity sha512-oNk3cjvx7Tt1Lgh/aeZAmFpGV2pDr5nHKrBVx6hTkzGhrnMuQqLt6UPlQjdYQ3QHXwyF/ZtZMO1D5Pfi0lu7SA== + dependencies: + "@docusaurus/logger" "2.2.0" + "@svgr/webpack" "^6.2.1" + file-loader "^6.2.0" + fs-extra "^10.1.0" + github-slugger "^1.4.0" + globby "^11.1.0" + gray-matter "^4.0.3" + js-yaml "^4.1.0" + lodash "^4.17.21" + micromatch "^4.0.5" + resolve-pathname "^3.0.0" + shelljs "^0.8.5" + tslib "^2.4.0" + url-loader "^4.1.1" + webpack "^5.73.0" + +"@easyops-cn/autocomplete.js@^0.38.1": + version "0.38.1" + resolved "https://registry.npmmirror.com/@easyops-cn/autocomplete.js/-/autocomplete.js-0.38.1.tgz" + integrity sha512-drg76jS6syilOUmVNkyo1c7ZEBPcPuK+aJA7AksM5ZIIbV57DMHCywiCr+uHyv8BE5jUTU98j/H7gVrkHrWW3Q== + dependencies: + cssesc "^3.0.0" + immediate "^3.2.3" + +"@easyops-cn/docusaurus-search-local@^0.21.1": + version "0.21.4" + resolved "https://registry.npmmirror.com/@easyops-cn/docusaurus-search-local/-/docusaurus-search-local-0.21.4.tgz" + integrity sha512-sUYxRKLfN/rInn1awf3Z6M5lefk9gSsrQp/6nKUTgaJI/NUmvZY8Hk3nRk0BPIHK8jjjzm+gWOhz8O0SBq8ihw== + dependencies: + "@docusaurus/utils" "^2.0.0-beta.4" + "@docusaurus/utils-validation" "^2.0.0-beta.4" + "@easyops-cn/autocomplete.js" "^0.38.1" + cheerio "^1.0.0-rc.3" + clsx "^1.1.1" + debug "^4.2.0" + fs-extra "^9.0.1" + klaw-sync "^6.0.0" + lunr "^2.3.9" + lunr-languages "^1.4.0" + mark.js "^8.11.1" + tslib "^2.2.0" + +"@hapi/hoek@^9.0.0": + version "9.3.0" + resolved "https://registry.npmmirror.com/@hapi/hoek/-/hoek-9.3.0.tgz" + integrity sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ== + +"@hapi/topo@^5.0.0": + version "5.1.0" + resolved "https://registry.npmmirror.com/@hapi/topo/-/topo-5.1.0.tgz" + integrity sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg== + dependencies: + "@hapi/hoek" "^9.0.0" + +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "https://registry.npmmirror.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.2": + version "0.3.2" + resolved "https://registry.npmmirror.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/gen-mapping@^0.3.5": + version "0.3.5" + resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz" + integrity sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg== + dependencies: + "@jridgewell/set-array" "^1.2.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.24" + +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.0" + resolved 
"https://registry.npmmirror.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1", "@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== + +"@jridgewell/source-map@^0.3.3": + version "0.3.6" + resolved "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.6.tgz" + integrity sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ== + dependencies: + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.25" + +"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14": + version "1.4.14" + resolved "https://registry.npmmirror.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.20", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.25" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + +"@leichtgewicht/ip-codec@^2.0.1": + version "2.0.4" + resolved "https://registry.npmmirror.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz" + integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== + +"@mdx-js/mdx@1.6.22", "@mdx-js/mdx@^1.6.21": + version "1.6.22" + resolved "https://registry.npmmirror.com/@mdx-js/mdx/-/mdx-1.6.22.tgz" + integrity sha512-AMxuLxPz2j5/6TpF/XSdKpQP1NlG0z11dFOlq+2IP/lSgl11GY8ji6S/rgsViN/L0BDvHvUMruRb7ub+24LUYA== + dependencies: + "@babel/core" "7.12.9" + "@babel/plugin-syntax-jsx" "7.12.1" + "@babel/plugin-syntax-object-rest-spread" "7.8.3" + "@mdx-js/util" "1.6.22" + babel-plugin-apply-mdx-type-prop "1.6.22" + babel-plugin-extract-import-names "1.6.22" + camelcase-css "2.0.1" + detab "2.0.4" + hast-util-raw "6.0.1" + lodash.uniq "4.5.0" + mdast-util-to-hast "10.0.1" + remark-footnotes "2.0.0" + remark-mdx "1.6.22" + remark-parse "8.0.3" + remark-squeeze-paragraphs "4.0.0" + style-to-object "0.3.0" + unified "9.2.0" + unist-builder "2.0.3" + unist-util-visit "2.0.3" + +"@mdx-js/react@1.6.22", "@mdx-js/react@^1.6.21": + version "1.6.22" + resolved "https://registry.npmmirror.com/@mdx-js/react/-/react-1.6.22.tgz" + integrity sha512-TDoPum4SHdfPiGSAaRBw7ECyI8VaHpK8GJugbJIJuqyh6kzw9ZLJZW3HGL3NNrJGxcAixUvqROm+YuQOo5eXtg== + +"@mdx-js/runtime@^1.6.22": + version "1.6.22" + resolved "https://registry.npmmirror.com/@mdx-js/runtime/-/runtime-1.6.22.tgz" + integrity sha512-p17spaO2+55VLCuxXA3LVHC4phRx60NR2XMdZ+qgVU1lKvEX4y88dmFNOzGDCPLJ03IZyKrJ/rPWWRiBrd9JrQ== + dependencies: + "@mdx-js/mdx" "1.6.22" + "@mdx-js/react" "1.6.22" + buble-jsx-only "^0.19.8" + +"@mdx-js/util@1.6.22": + version "1.6.22" + resolved "https://registry.npmmirror.com/@mdx-js/util/-/util-1.6.22.tgz" + integrity sha512-H1rQc1ZOHANWBvPcW+JpGwr+juXSxM8Q8YCkm3GhZd8REu1fHR3z99CErO1p9pkcfcxZnMdIZdIsXkOHY0NilA== + 
+"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.npmmirror.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.npmmirror.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.npmmirror.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@polka/url@^1.0.0-next.20": + version "1.0.0-next.21" + resolved "https://registry.npmmirror.com/@polka/url/-/url-1.0.0-next.21.tgz" + integrity sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g== + +"@sideway/address@^4.1.3": + version "4.1.4" + resolved "https://registry.npmmirror.com/@sideway/address/-/address-4.1.4.tgz" + integrity sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw== + dependencies: + "@hapi/hoek" "^9.0.0" + +"@sideway/formula@3.0.1", "@sideway/formula@^3.0.0": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@sideway/formula/-/formula-3.0.1.tgz#80fcbcbaf7ce031e0ef2dd29b1bfc7c3f583611f" + integrity sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg== + +"@sideway/pinpoint@^2.0.0": + version "2.0.0" + resolved "https://registry.npmmirror.com/@sideway/pinpoint/-/pinpoint-2.0.0.tgz" + integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ== + +"@sindresorhus/is@^4.0.0": + version "4.6.0" + resolved "https://registry.npmmirror.com/@sindresorhus/is/-/is-4.6.0.tgz" + integrity sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw== + +"@slorber/static-site-generator-webpack-plugin@^4.0.0": + version "4.0.7" + resolved "https://registry.npmmirror.com/@slorber/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.7.tgz" + integrity sha512-Ug7x6z5lwrz0WqdnNFOMYrDQNTPAprvHLSh6+/fmml3qUiz6l5eq+2MzLKWtn/q5K5NpSiFsZTP/fck/3vjSxA== + dependencies: + eval "^0.1.8" + p-map "^4.0.0" + webpack-sources "^3.2.2" + +"@svgr/babel-plugin-add-jsx-attribute@^5.4.0": + version "5.4.0" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz" + integrity sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg== + +"@svgr/babel-plugin-add-jsx-attribute@^6.5.1": + version "6.5.1" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.5.1.tgz" + integrity sha512-9PYGcXrAxitycIjRmZB+Q0JaN07GZIWaTBIGQzfaZv+qr1n8X1XUEJ5rZ/vx6OVD9RRYlrNnXWExQXcmZeD/BQ== + +"@svgr/babel-plugin-remove-jsx-attribute@*", "@svgr/babel-plugin-remove-jsx-attribute@^5.4.0": + version "5.4.0" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz" + integrity sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg== + 
+"@svgr/babel-plugin-remove-jsx-empty-expression@*", "@svgr/babel-plugin-remove-jsx-empty-expression@^5.0.1": + version "5.0.1" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz" + integrity sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA== + +"@svgr/babel-plugin-replace-jsx-attribute-value@^5.0.1": + version "5.0.1" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz" + integrity sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ== + +"@svgr/babel-plugin-replace-jsx-attribute-value@^6.5.1": + version "6.5.1" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-6.5.1.tgz" + integrity sha512-8DPaVVE3fd5JKuIC29dqyMB54sA6mfgki2H2+swh+zNJoynC8pMPzOkidqHOSc6Wj032fhl8Z0TVn1GiPpAiJg== + +"@svgr/babel-plugin-svg-dynamic-title@^5.4.0": + version "5.4.0" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz" + integrity sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg== + +"@svgr/babel-plugin-svg-dynamic-title@^6.5.1": + version "6.5.1" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-6.5.1.tgz" + integrity sha512-FwOEi0Il72iAzlkaHrlemVurgSQRDFbk0OC8dSvD5fSBPHltNh7JtLsxmZUhjYBZo2PpcU/RJvvi6Q0l7O7ogw== + +"@svgr/babel-plugin-svg-em-dimensions@^5.4.0": + version "5.4.0" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz" + integrity sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw== + +"@svgr/babel-plugin-svg-em-dimensions@^6.5.1": + version "6.5.1" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-6.5.1.tgz" + integrity sha512-gWGsiwjb4tw+ITOJ86ndY/DZZ6cuXMNE/SjcDRg+HLuCmwpcjOktwRF9WgAiycTqJD/QXqL2f8IzE2Rzh7aVXA== + +"@svgr/babel-plugin-transform-react-native-svg@^5.4.0": + version "5.4.0" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz" + integrity sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q== + +"@svgr/babel-plugin-transform-react-native-svg@^6.5.1": + version "6.5.1" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-6.5.1.tgz" + integrity sha512-2jT3nTayyYP7kI6aGutkyfJ7UMGtuguD72OjeGLwVNyfPRBD8zQthlvL+fAbAKk5n9ZNcvFkp/b1lZ7VsYqVJg== + +"@svgr/babel-plugin-transform-svg-component@^5.5.0": + version "5.5.0" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz" + integrity sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ== + +"@svgr/babel-plugin-transform-svg-component@^6.5.1": + version "6.5.1" + resolved "https://registry.npmmirror.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-6.5.1.tgz" + integrity sha512-a1p6LF5Jt33O3rZoVRBqdxL350oge54iZWHNI6LJB5tQ7EelvD/Mb1mfBiZNAan0dt4i3VArkFRjA4iObuNykQ== + 
+"@svgr/babel-preset@^5.5.0": + version "5.5.0" + resolved "https://registry.npmmirror.com/@svgr/babel-preset/-/babel-preset-5.5.0.tgz" + integrity sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig== + dependencies: + "@svgr/babel-plugin-add-jsx-attribute" "^5.4.0" + "@svgr/babel-plugin-remove-jsx-attribute" "^5.4.0" + "@svgr/babel-plugin-remove-jsx-empty-expression" "^5.0.1" + "@svgr/babel-plugin-replace-jsx-attribute-value" "^5.0.1" + "@svgr/babel-plugin-svg-dynamic-title" "^5.4.0" + "@svgr/babel-plugin-svg-em-dimensions" "^5.4.0" + "@svgr/babel-plugin-transform-react-native-svg" "^5.4.0" + "@svgr/babel-plugin-transform-svg-component" "^5.5.0" + +"@svgr/babel-preset@^6.5.1": + version "6.5.1" + resolved "https://registry.npmmirror.com/@svgr/babel-preset/-/babel-preset-6.5.1.tgz" + integrity sha512-6127fvO/FF2oi5EzSQOAjo1LE3OtNVh11R+/8FXa+mHx1ptAaS4cknIjnUA7e6j6fwGGJ17NzaTJFUwOV2zwCw== + dependencies: + "@svgr/babel-plugin-add-jsx-attribute" "^6.5.1" + "@svgr/babel-plugin-remove-jsx-attribute" "*" + "@svgr/babel-plugin-remove-jsx-empty-expression" "*" + "@svgr/babel-plugin-replace-jsx-attribute-value" "^6.5.1" + "@svgr/babel-plugin-svg-dynamic-title" "^6.5.1" + "@svgr/babel-plugin-svg-em-dimensions" "^6.5.1" + "@svgr/babel-plugin-transform-react-native-svg" "^6.5.1" + "@svgr/babel-plugin-transform-svg-component" "^6.5.1" + +"@svgr/core@^5.5.0": + version "5.5.0" + resolved "https://registry.npmmirror.com/@svgr/core/-/core-5.5.0.tgz" + integrity sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ== + dependencies: + "@svgr/plugin-jsx" "^5.5.0" + camelcase "^6.2.0" + cosmiconfig "^7.0.0" + +"@svgr/core@^6.5.1": + version "6.5.1" + resolved "https://registry.npmmirror.com/@svgr/core/-/core-6.5.1.tgz" + integrity sha512-/xdLSWxK5QkqG524ONSjvg3V/FkNyCv538OIBdQqPNaAta3AsXj/Bd2FbvR87yMbXO2hFSWiAe/Q6IkVPDw+mw== + dependencies: + "@babel/core" "^7.19.6" + "@svgr/babel-preset" "^6.5.1" + "@svgr/plugin-jsx" "^6.5.1" + camelcase "^6.2.0" + cosmiconfig "^7.0.1" + +"@svgr/hast-util-to-babel-ast@^5.5.0": + version "5.5.0" + resolved "https://registry.npmmirror.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz" + integrity sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ== + dependencies: + "@babel/types" "^7.12.6" + +"@svgr/hast-util-to-babel-ast@^6.5.1": + version "6.5.1" + resolved "https://registry.npmmirror.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-6.5.1.tgz" + integrity sha512-1hnUxxjd83EAxbL4a0JDJoD3Dao3hmjvyvyEV8PzWmLK3B9m9NPlW7GKjFyoWE8nM7HnXzPcmmSyOW8yOddSXw== + dependencies: + "@babel/types" "^7.20.0" + entities "^4.4.0" + +"@svgr/plugin-jsx@^5.5.0": + version "5.5.0" + resolved "https://registry.npmmirror.com/@svgr/plugin-jsx/-/plugin-jsx-5.5.0.tgz" + integrity sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA== + dependencies: + "@babel/core" "^7.12.3" + "@svgr/babel-preset" "^5.5.0" + "@svgr/hast-util-to-babel-ast" "^5.5.0" + svg-parser "^2.0.2" + +"@svgr/plugin-jsx@^6.5.1": + version "6.5.1" + resolved "https://registry.npmmirror.com/@svgr/plugin-jsx/-/plugin-jsx-6.5.1.tgz" + integrity sha512-+UdQxI3jgtSjCykNSlEMuy1jSRQlGC7pqBCPvkG/2dATdWo082zHTTK3uhnAju2/6XpE6B5mZ3z4Z8Ns01S8Gw== + dependencies: + "@babel/core" "^7.19.6" + "@svgr/babel-preset" "^6.5.1" + "@svgr/hast-util-to-babel-ast" "^6.5.1" + svg-parser "^2.0.4" + +"@svgr/plugin-svgo@^5.5.0": + version "5.5.0" + resolved 
"https://registry.npmmirror.com/@svgr/plugin-svgo/-/plugin-svgo-5.5.0.tgz" + integrity sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ== + dependencies: + cosmiconfig "^7.0.0" + deepmerge "^4.2.2" + svgo "^1.2.2" + +"@svgr/plugin-svgo@^6.5.1": + version "6.5.1" + resolved "https://registry.npmmirror.com/@svgr/plugin-svgo/-/plugin-svgo-6.5.1.tgz" + integrity sha512-omvZKf8ixP9z6GWgwbtmP9qQMPX4ODXi+wzbVZgomNFsUIlHA1sf4fThdwTWSsZGgvGAG6yE+b/F5gWUkcZ/iQ== + dependencies: + cosmiconfig "^7.0.1" + deepmerge "^4.2.2" + svgo "^2.8.0" + +"@svgr/webpack@^5.5.0": + version "5.5.0" + resolved "https://registry.npmmirror.com/@svgr/webpack/-/webpack-5.5.0.tgz" + integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g== + dependencies: + "@babel/core" "^7.12.3" + "@babel/plugin-transform-react-constant-elements" "^7.12.1" + "@babel/preset-env" "^7.12.1" + "@babel/preset-react" "^7.12.5" + "@svgr/core" "^5.5.0" + "@svgr/plugin-jsx" "^5.5.0" + "@svgr/plugin-svgo" "^5.5.0" + loader-utils "^2.0.0" + +"@svgr/webpack@^6.2.1": + version "6.5.1" + resolved "https://registry.npmmirror.com/@svgr/webpack/-/webpack-6.5.1.tgz" + integrity sha512-cQ/AsnBkXPkEK8cLbv4Dm7JGXq2XrumKnL1dRpJD9rIO2fTIlJI9a1uCciYG1F2aUsox/hJQyNGbt3soDxSRkA== + dependencies: + "@babel/core" "^7.19.6" + "@babel/plugin-transform-react-constant-elements" "^7.18.12" + "@babel/preset-env" "^7.19.4" + "@babel/preset-react" "^7.18.6" + "@babel/preset-typescript" "^7.18.6" + "@svgr/core" "^6.5.1" + "@svgr/plugin-jsx" "^6.5.1" + "@svgr/plugin-svgo" "^6.5.1" + +"@szmarczak/http-timer@^4.0.5": + version "4.0.6" + resolved "https://registry.npmmirror.com/@szmarczak/http-timer/-/http-timer-4.0.6.tgz" + integrity sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w== + dependencies: + defer-to-connect "^2.0.0" + +"@trysound/sax@0.2.0": + version "0.2.0" + resolved "https://registry.npmmirror.com/@trysound/sax/-/sax-0.2.0.tgz" + integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== + +"@types/body-parser@*": + version "1.19.2" + resolved "https://registry.npmmirror.com/@types/body-parser/-/body-parser-1.19.2.tgz" + integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== + dependencies: + "@types/connect" "*" + "@types/node" "*" + +"@types/bonjour@^3.5.9": + version "3.5.10" + resolved "https://registry.npmmirror.com/@types/bonjour/-/bonjour-3.5.10.tgz" + integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== + dependencies: + "@types/node" "*" + +"@types/cacheable-request@^6.0.1": + version "6.0.2" + resolved "https://registry.npmmirror.com/@types/cacheable-request/-/cacheable-request-6.0.2.tgz" + integrity sha512-B3xVo+dlKM6nnKTcmm5ZtY/OL8bOAOd2Olee9M1zft65ox50OzjEHW91sDiU9j6cvW8Ejg1/Qkf4xd2kugApUA== + dependencies: + "@types/http-cache-semantics" "*" + "@types/keyv" "*" + "@types/node" "*" + "@types/responselike" "*" + +"@types/connect-history-api-fallback@^1.3.5": + version "1.3.5" + resolved "https://registry.npmmirror.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz" + integrity sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw== + dependencies: + "@types/express-serve-static-core" "*" + "@types/node" "*" + +"@types/connect@*": + version "3.4.35" + resolved 
"https://registry.npmmirror.com/@types/connect/-/connect-3.4.35.tgz" + integrity sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== + dependencies: + "@types/node" "*" + +"@types/eslint-scope@^3.7.3": + version "3.7.4" + resolved "https://registry.npmmirror.com/@types/eslint-scope/-/eslint-scope-3.7.4.tgz" + integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== + dependencies: + "@types/eslint" "*" + "@types/estree" "*" + +"@types/eslint@*": + version "8.4.10" + resolved "https://registry.npmmirror.com/@types/eslint/-/eslint-8.4.10.tgz" + integrity sha512-Sl/HOqN8NKPmhWo2VBEPm0nvHnu2LL3v9vKo8MEq0EtbJ4eVzGPl41VNPvn5E1i5poMk4/XD8UriLHpJvEP/Nw== + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + +"@types/estree@*", "@types/estree@^1.0.5": + version "1.0.5" + resolved "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz" + integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw== + +"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": + version "4.17.31" + resolved "https://registry.npmmirror.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.31.tgz" + integrity sha512-DxMhY+NAsTwMMFHBTtJFNp5qiHKJ7TeqOo23zVEM9alT1Ml27Q3xcTH0xwxn7Q0BbMcVEJOs/7aQtUWupUQN3Q== + dependencies: + "@types/node" "*" + "@types/qs" "*" + "@types/range-parser" "*" + +"@types/express@*", "@types/express@^4.17.13": + version "4.17.14" + resolved "https://registry.npmmirror.com/@types/express/-/express-4.17.14.tgz" + integrity sha512-TEbt+vaPFQ+xpxFLFssxUDXj5cWCxZJjIcB7Yg0k0GMHGtgtQgpvx/MUQUeAkNbA9AAGrwkAsoeItdTgS7FMyg== + dependencies: + "@types/body-parser" "*" + "@types/express-serve-static-core" "^4.17.18" + "@types/qs" "*" + "@types/serve-static" "*" + +"@types/github-slugger@^1.3.0": + version "1.3.0" + resolved "https://registry.npmmirror.com/@types/github-slugger/-/github-slugger-1.3.0.tgz" + integrity sha512-J/rMZa7RqiH/rT29TEVZO4nBoDP9XJOjnbbIofg7GQKs4JIduEO3WLpte+6WeUz/TcrXKlY+bM7FYrp8yFB+3g== + +"@types/hast@^2.0.0": + version "2.3.4" + resolved "https://registry.npmmirror.com/@types/hast/-/hast-2.3.4.tgz" + integrity sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g== + dependencies: + "@types/unist" "*" + +"@types/html-minifier-terser@^6.0.0": + version "6.1.0" + resolved "https://registry.npmmirror.com/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz" + integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== + +"@types/http-cache-semantics@*": + version "4.0.1" + resolved "https://registry.npmmirror.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz" + integrity sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ== + +"@types/http-proxy@^1.17.8": + version "1.17.9" + resolved "https://registry.npmmirror.com/@types/http-proxy/-/http-proxy-1.17.9.tgz" + integrity sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw== + dependencies: + "@types/node" "*" + +"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": + version "7.0.11" + resolved "https://registry.npmmirror.com/@types/json-schema/-/json-schema-7.0.11.tgz" + integrity 
sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + +"@types/katex@^0.11.0": + version "0.11.1" + resolved "https://registry.npmmirror.com/@types/katex/-/katex-0.11.1.tgz" + integrity sha512-DUlIj2nk0YnJdlWgsFuVKcX27MLW0KbKmGVoUHmFr+74FYYNUDAaj9ZqTADvsbE8rfxuVmSFc7KczYn5Y09ozg== + +"@types/keyv@*": + version "4.2.0" + resolved "https://registry.npmmirror.com/@types/keyv/-/keyv-4.2.0.tgz" + integrity sha512-xoBtGl5R9jeKUhc8ZqeYaRDx04qqJ10yhhXYGmJ4Jr8qKpvMsDQQrNUvF/wUJ4klOtmJeJM+p2Xo3zp9uaC3tw== + dependencies: + keyv "*" + +"@types/mdast@^3.0.0": + version "3.0.10" + resolved "https://registry.npmmirror.com/@types/mdast/-/mdast-3.0.10.tgz" + integrity sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA== + dependencies: + "@types/unist" "*" + +"@types/mime@*": + version "3.0.1" + resolved "https://registry.npmmirror.com/@types/mime/-/mime-3.0.1.tgz" + integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== + +"@types/node@*": + version "18.11.9" + resolved "https://registry.npmmirror.com/@types/node/-/node-18.11.9.tgz" + integrity sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg== + +"@types/node@^17.0.5": + version "17.0.45" + resolved "https://registry.npmmirror.com/@types/node/-/node-17.0.45.tgz" + integrity sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw== + +"@types/parse-json@^4.0.0": + version "4.0.0" + resolved "https://registry.npmmirror.com/@types/parse-json/-/parse-json-4.0.0.tgz" + integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + +"@types/parse5@^5.0.0": + version "5.0.3" + resolved "https://registry.npmmirror.com/@types/parse5/-/parse5-5.0.3.tgz" + integrity sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw== + +"@types/prop-types@*": + version "15.7.5" + resolved "https://registry.npmmirror.com/@types/prop-types/-/prop-types-15.7.5.tgz" + integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== + +"@types/q@^1.5.1": + version "1.5.5" + resolved "https://registry.npmmirror.com/@types/q/-/q-1.5.5.tgz" + integrity sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ== + +"@types/qs@*": + version "6.9.7" + resolved "https://registry.npmmirror.com/@types/qs/-/qs-6.9.7.tgz" + integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== + +"@types/range-parser@*": + version "1.2.4" + resolved "https://registry.npmmirror.com/@types/range-parser/-/range-parser-1.2.4.tgz" + integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== + +"@types/react@*": + version "18.0.25" + resolved "https://registry.npmmirror.com/@types/react/-/react-18.0.25.tgz" + integrity sha512-xD6c0KDT4m7n9uD4ZHi02lzskaiqcBxf4zi+tXZY98a04wvc0hi/TcCPC2FOESZi51Nd7tlUeOJY8RofL799/g== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/responselike@*", "@types/responselike@^1.0.0": + version "1.0.0" + resolved "https://registry.npmmirror.com/@types/responselike/-/responselike-1.0.0.tgz" + integrity sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA== + dependencies: + "@types/node" "*" + +"@types/retry@0.12.0": + version 
"0.12.0" + resolved "https://registry.npmmirror.com/@types/retry/-/retry-0.12.0.tgz" + integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== + +"@types/sax@^1.2.1": + version "1.2.4" + resolved "https://registry.npmmirror.com/@types/sax/-/sax-1.2.4.tgz" + integrity sha512-pSAff4IAxJjfAXUG6tFkO7dsSbTmf8CtUpfhhZ5VhkRpC4628tJhh3+V6H1E+/Gs9piSzYKT5yzHO5M4GG9jkw== + dependencies: + "@types/node" "*" + +"@types/scheduler@*": + version "0.16.2" + resolved "https://registry.npmmirror.com/@types/scheduler/-/scheduler-0.16.2.tgz" + integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + +"@types/serve-index@^1.9.1": + version "1.9.1" + resolved "https://registry.npmmirror.com/@types/serve-index/-/serve-index-1.9.1.tgz" + integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== + dependencies: + "@types/express" "*" + +"@types/serve-static@*", "@types/serve-static@^1.13.10": + version "1.15.0" + resolved "https://registry.npmmirror.com/@types/serve-static/-/serve-static-1.15.0.tgz" + integrity sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg== + dependencies: + "@types/mime" "*" + "@types/node" "*" + +"@types/sockjs@^0.3.33": + version "0.3.33" + resolved "https://registry.npmmirror.com/@types/sockjs/-/sockjs-0.3.33.tgz" + integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== + dependencies: + "@types/node" "*" + +"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3": + version "2.0.6" + resolved "https://registry.npmmirror.com/@types/unist/-/unist-2.0.6.tgz" + integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ== + +"@types/ws@^8.5.1": + version "8.5.3" + resolved "https://registry.npmmirror.com/@types/ws/-/ws-8.5.3.tgz" + integrity sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w== + dependencies: + "@types/node" "*" + +"@webassemblyjs/ast@1.12.1", "@webassemblyjs/ast@^1.12.1": + version "1.12.1" + resolved "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz" + integrity sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg== + dependencies: + "@webassemblyjs/helper-numbers" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + +"@webassemblyjs/floating-point-hex-parser@1.11.6": + version "1.11.6" + resolved "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz" + integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== + +"@webassemblyjs/helper-api-error@1.11.6": + version "1.11.6" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz" + integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== + +"@webassemblyjs/helper-buffer@1.12.1": + version "1.12.1" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz" + integrity sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw== + +"@webassemblyjs/helper-numbers@1.11.6": + version "1.11.6" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz" + integrity 
sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== + dependencies: + "@webassemblyjs/floating-point-hex-parser" "1.11.6" + "@webassemblyjs/helper-api-error" "1.11.6" + "@xtuc/long" "4.2.2" + +"@webassemblyjs/helper-wasm-bytecode@1.11.6": + version "1.11.6" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz" + integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== + +"@webassemblyjs/helper-wasm-section@1.12.1": + version "1.12.1" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz" + integrity sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/wasm-gen" "1.12.1" + +"@webassemblyjs/ieee754@1.11.6": + version "1.11.6" + resolved "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz" + integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== + dependencies: + "@xtuc/ieee754" "^1.2.0" + +"@webassemblyjs/leb128@1.11.6": + version "1.11.6" + resolved "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.6.tgz" + integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== + dependencies: + "@xtuc/long" "4.2.2" + +"@webassemblyjs/utf8@1.11.6": + version "1.11.6" + resolved "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.6.tgz" + integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== + +"@webassemblyjs/wasm-edit@^1.12.1": + version "1.12.1" + resolved "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz" + integrity sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/helper-wasm-section" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-opt" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" + "@webassemblyjs/wast-printer" "1.12.1" + +"@webassemblyjs/wasm-gen@1.12.1": + version "1.12.1" + resolved "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz" + integrity sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" "1.11.6" + +"@webassemblyjs/wasm-opt@1.12.1": + version "1.12.1" + resolved "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz" + integrity sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" + +"@webassemblyjs/wasm-parser@1.12.1", "@webassemblyjs/wasm-parser@^1.12.1": + version "1.12.1" + resolved "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz" + integrity sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ== + 
dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-api-error" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" "1.11.6" + +"@webassemblyjs/wast-printer@1.12.1": + version "1.12.1" + resolved "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz" + integrity sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@xtuc/long" "4.2.2" + +"@xtuc/ieee754@^1.2.0": + version "1.2.0" + resolved "https://registry.npmmirror.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz" + integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== + +"@xtuc/long@4.2.2": + version "4.2.2" + resolved "https://registry.npmmirror.com/@xtuc/long/-/long-4.2.2.tgz" + integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== + +accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: + version "1.3.8" + resolved "https://registry.npmmirror.com/accepts/-/accepts-1.3.8.tgz" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + +acorn-dynamic-import@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/acorn-dynamic-import/-/acorn-dynamic-import-4.0.0.tgz" + integrity sha512-d3OEjQV4ROpoflsnUA8HozoIR504TFxNivYEUi6uwz0IYhBkTDXGuWlNdMtybRt3nqVx/L6XqMt0FxkXuWKZhw== + +acorn-import-attributes@^1.9.5: + version "1.9.5" + resolved "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz" + integrity sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ== + +acorn-jsx@^5.0.1: + version "5.3.2" + resolved "https://registry.npmmirror.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn-walk@^8.0.0: + version "8.2.0" + resolved "https://registry.npmmirror.com/acorn-walk/-/acorn-walk-8.2.0.tgz" + integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== + +acorn@^6.1.1: + version "6.4.2" + resolved "https://registry.npmmirror.com/acorn/-/acorn-6.4.2.tgz" + integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ== + +acorn@^8.0.4, acorn@^8.7.1, acorn@^8.8.2: + version "8.12.1" + resolved "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz" + integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg== + +address@^1.0.1, address@^1.1.2: + version "1.2.1" + resolved "https://registry.npmmirror.com/address/-/address-1.2.1.tgz" + integrity sha512-B+6bi5D34+fDYENiH5qOlA0cV2rAGKuWZ9LeyUUehbXy8e0VS9e498yO0Jeeh+iM+6KbfudHTFjXw2MmJD4QRA== + +aggregate-error@^3.0.0: + version "3.1.0" + resolved "https://registry.npmmirror.com/aggregate-error/-/aggregate-error-3.1.0.tgz" + integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== + dependencies: + clean-stack "^2.0.0" + indent-string "^4.0.0" + +ajv-formats@^2.1.1: + version "2.1.1" + resolved "https://registry.npmmirror.com/ajv-formats/-/ajv-formats-2.1.1.tgz" + integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== + dependencies: + ajv "^8.0.0" + 
+ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: + version "3.5.2" + resolved "https://registry.npmmirror.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz" + integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== + +ajv-keywords@^5.0.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz" + integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== + dependencies: + fast-deep-equal "^3.1.3" + +ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: + version "6.12.6" + resolved "https://registry.npmmirror.com/ajv/-/ajv-6.12.6.tgz" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ajv@^8.0.0, ajv@^8.8.0: + version "8.11.0" + resolved "https://registry.npmmirror.com/ajv/-/ajv-8.11.0.tgz" + integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +algoliasearch-helper@^3.5.5: + version "3.11.1" + resolved "https://registry.npmmirror.com/algoliasearch-helper/-/algoliasearch-helper-3.11.1.tgz" + integrity sha512-mvsPN3eK4E0bZG0/WlWJjeqe/bUD2KOEVOl0GyL/TGXn6wcpZU8NOuztGHCUKXkyg5gq6YzUakVTmnmSSO5Yiw== + dependencies: + "@algolia/events" "^4.0.1" + +algoliasearch@^4.0.0, algoliasearch@^4.10.5: + version "4.14.2" + resolved "https://registry.npmmirror.com/algoliasearch/-/algoliasearch-4.14.2.tgz" + integrity sha512-ngbEQonGEmf8dyEh5f+uOIihv4176dgbuOZspiuhmTTBRBuzWu3KCGHre6uHj5YyuC7pNvQGzB6ZNJyZi0z+Sg== + dependencies: + "@algolia/cache-browser-local-storage" "4.14.2" + "@algolia/cache-common" "4.14.2" + "@algolia/cache-in-memory" "4.14.2" + "@algolia/client-account" "4.14.2" + "@algolia/client-analytics" "4.14.2" + "@algolia/client-common" "4.14.2" + "@algolia/client-personalization" "4.14.2" + "@algolia/client-search" "4.14.2" + "@algolia/logger-common" "4.14.2" + "@algolia/logger-console" "4.14.2" + "@algolia/requester-browser-xhr" "4.14.2" + "@algolia/requester-common" "4.14.2" + "@algolia/requester-node-http" "4.14.2" + "@algolia/transporter" "4.14.2" + +ansi-align@^3.0.0: + version "3.0.1" + resolved "https://registry.npmmirror.com/ansi-align/-/ansi-align-3.0.1.tgz" + integrity sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w== + dependencies: + string-width "^4.1.0" + +ansi-html-community@^0.0.8: + version "0.0.8" + resolved "https://registry.npmmirror.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz" + integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.npmmirror.com/ansi-regex/-/ansi-regex-5.0.1.tgz" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-3.2.1.tgz" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-4.3.0.tgz" + integrity 
sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +anymatch@~3.1.2: + version "3.1.2" + resolved "https://registry.npmmirror.com/anymatch/-/anymatch-3.1.2.tgz" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +arg@^5.0.0: + version "5.0.2" + resolved "https://registry.npmmirror.com/arg/-/arg-5.0.2.tgz" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.npmmirror.com/argparse/-/argparse-1.0.10.tgz" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.npmmirror.com/argparse/-/argparse-2.0.1.tgz" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +array-flatten@1.1.1: + version "1.1.1" + resolved "https://registry.npmmirror.com/array-flatten/-/array-flatten-1.1.1.tgz" + integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== + +array-flatten@^2.1.2: + version "2.1.2" + resolved "https://registry.npmmirror.com/array-flatten/-/array-flatten-2.1.2.tgz" + integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/array-union/-/array-union-2.1.0.tgz" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array.prototype.reduce@^1.0.4: + version "1.0.5" + resolved "https://registry.npmmirror.com/array.prototype.reduce/-/array.prototype.reduce-1.0.5.tgz" + integrity sha512-kDdugMl7id9COE8R7MHF5jWk7Dqt/fs4Pv+JXoICnYwqpjjjbUurz6w5fT5IG6brLdJhv6/VoHB0H7oyIBXd+Q== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + es-array-method-boxes-properly "^1.0.0" + is-string "^1.0.7" + +asap@~2.0.3: + version "2.0.6" + resolved "https://registry.npmmirror.com/asap/-/asap-2.0.6.tgz" + integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== + +at-least-node@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/at-least-node/-/at-least-node-1.0.0.tgz" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== + +autoprefixer@^10.3.5, autoprefixer@^10.4.12: + version "10.4.13" + resolved "https://registry.npmmirror.com/autoprefixer/-/autoprefixer-10.4.13.tgz" + integrity sha512-49vKpMqcZYsJjwotvt4+h/BCjJVnhGwcLpDt5xkcaOG3eLrG/HUYLagrihYsQ+qrIBgIzX1Rw7a6L8I/ZA1Atg== + dependencies: + browserslist "^4.21.4" + caniuse-lite "^1.0.30001426" + fraction.js "^4.2.0" + normalize-range "^0.1.2" + picocolors "^1.0.0" + postcss-value-parser "^4.2.0" + +axios@^0.25.0: + version "0.25.0" + resolved "https://registry.npmmirror.com/axios/-/axios-0.25.0.tgz" + integrity sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g== + dependencies: + follow-redirects "^1.14.7" + +babel-loader@^8.2.2: + version "8.3.0" + resolved "https://registry.npmmirror.com/babel-loader/-/babel-loader-8.3.0.tgz" + integrity 
sha512-H8SvsMF+m9t15HNLMipppzkC+Y2Yq+v3SonZyU70RBL/h1gxPkH08Ot8pEE9Z4Kd+czyWJClmFS8qzIP9OZ04Q== + dependencies: + find-cache-dir "^3.3.1" + loader-utils "^2.0.0" + make-dir "^3.1.0" + schema-utils "^2.6.5" + +babel-plugin-apply-mdx-type-prop@1.6.22: + version "1.6.22" + resolved "https://registry.npmmirror.com/babel-plugin-apply-mdx-type-prop/-/babel-plugin-apply-mdx-type-prop-1.6.22.tgz" + integrity sha512-VefL+8o+F/DfK24lPZMtJctrCVOfgbqLAGZSkxwhazQv4VxPg3Za/i40fu22KR2m8eEda+IfSOlPLUSIiLcnCQ== + dependencies: + "@babel/helper-plugin-utils" "7.10.4" + "@mdx-js/util" "1.6.22" + +babel-plugin-dynamic-import-node@2.3.0: + version "2.3.0" + resolved "https://registry.npmmirror.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz" + integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== + dependencies: + object.assign "^4.1.0" + +babel-plugin-extract-import-names@1.6.22: + version "1.6.22" + resolved "https://registry.npmmirror.com/babel-plugin-extract-import-names/-/babel-plugin-extract-import-names-1.6.22.tgz" + integrity sha512-yJ9BsJaISua7d8zNT7oRG1ZLBJCIdZ4PZqmH8qa9N5AK01ifk3fnkc98AXhtzE7UkfCsEumvoQWgoYLhOnJ7jQ== + dependencies: + "@babel/helper-plugin-utils" "7.10.4" + +babel-plugin-polyfill-corejs2@^0.3.3: + version "0.3.3" + resolved "https://registry.npmmirror.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz" + integrity sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q== + dependencies: + "@babel/compat-data" "^7.17.7" + "@babel/helper-define-polyfill-provider" "^0.3.3" + semver "^6.1.1" + +babel-plugin-polyfill-corejs3@^0.6.0: + version "0.6.0" + resolved "https://registry.npmmirror.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.6.0.tgz" + integrity sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + core-js-compat "^3.25.1" + +babel-plugin-polyfill-regenerator@^0.4.1: + version "0.4.1" + resolved "https://registry.npmmirror.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz" + integrity sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + +bail@^1.0.0: + version "1.0.5" + resolved "https://registry.npmmirror.com/bail/-/bail-1.0.5.tgz" + integrity sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ== + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.npmmirror.com/balanced-match/-/balanced-match-1.0.2.tgz" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +base16@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/base16/-/base16-1.0.0.tgz" + integrity sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ== + +batch@0.6.1: + version "0.6.1" + resolved "https://registry.npmmirror.com/batch/-/batch-0.6.1.tgz" + integrity sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw== + +big.js@^5.2.2: + version "5.2.2" + resolved "https://registry.npmmirror.com/big.js/-/big.js-5.2.2.tgz" + integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== + +binary-extensions@^2.0.0: + version "2.2.0" + resolved 
"https://registry.npmmirror.com/binary-extensions/-/binary-extensions-2.2.0.tgz" + integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + +body-parser@1.20.1: + version "1.20.1" + resolved "https://registry.npmmirror.com/body-parser/-/body-parser-1.20.1.tgz" + integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw== + dependencies: + bytes "3.1.2" + content-type "~1.0.4" + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.4.24" + on-finished "2.4.1" + qs "6.11.0" + raw-body "2.5.1" + type-is "~1.6.18" + unpipe "1.0.0" + +bonjour-service@^1.0.11: + version "1.0.14" + resolved "https://registry.npmmirror.com/bonjour-service/-/bonjour-service-1.0.14.tgz" + integrity sha512-HIMbgLnk1Vqvs6B4Wq5ep7mxvj9sGz5d1JJyDNSGNIdA/w2MCz6GTjWTdjqOJV1bEPj+6IkxDvWNFKEBxNt4kQ== + dependencies: + array-flatten "^2.1.2" + dns-equal "^1.0.0" + fast-deep-equal "^3.1.3" + multicast-dns "^7.2.5" + +boolbase@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/boolbase/-/boolbase-1.0.0.tgz" + integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== + +boxen@^5.0.0, boxen@^5.0.1: + version "5.1.2" + resolved "https://registry.npmmirror.com/boxen/-/boxen-5.1.2.tgz" + integrity sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ== + dependencies: + ansi-align "^3.0.0" + camelcase "^6.2.0" + chalk "^4.1.0" + cli-boxes "^2.2.1" + string-width "^4.2.2" + type-fest "^0.20.2" + widest-line "^3.1.0" + wrap-ansi "^7.0.0" + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-1.1.11.tgz" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^3.0.2, braces@~3.0.2: + version "3.0.2" + resolved "https://registry.npmmirror.com/braces/-/braces-3.0.2.tgz" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +browserslist@^4.0.0, browserslist@^4.16.5, browserslist@^4.16.6, browserslist@^4.21.10, browserslist@^4.21.3, browserslist@^4.21.4: + version "4.23.3" + resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.23.3.tgz" + integrity sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA== + dependencies: + caniuse-lite "^1.0.30001646" + electron-to-chromium "^1.5.4" + node-releases "^2.0.18" + update-browserslist-db "^1.1.0" + +buble-jsx-only@^0.19.8: + version "0.19.8" + resolved "https://registry.npmmirror.com/buble-jsx-only/-/buble-jsx-only-0.19.8.tgz" + integrity sha512-7AW19pf7PrKFnGTEDzs6u9+JZqQwM1VnLS19OlqYDhXomtFFknnoQJAPHeg84RMFWAvOhYrG7harizJNwUKJsA== + dependencies: + acorn "^6.1.1" + acorn-dynamic-import "^4.0.0" + acorn-jsx "^5.0.1" + chalk "^2.4.2" + magic-string "^0.25.3" + minimist "^1.2.0" + regexpu-core "^4.5.4" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "https://registry.npmmirror.com/buffer-from/-/buffer-from-1.1.2.tgz" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +bytes@3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/bytes/-/bytes-3.0.0.tgz" + integrity 
sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== + +bytes@3.1.2: + version "3.1.2" + resolved "https://registry.npmmirror.com/bytes/-/bytes-3.1.2.tgz" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +cacheable-lookup@^5.0.3: + version "5.0.4" + resolved "https://registry.npmmirror.com/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz" + integrity sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA== + +cacheable-request@^7.0.2: + version "7.0.2" + resolved "https://registry.npmmirror.com/cacheable-request/-/cacheable-request-7.0.2.tgz" + integrity sha512-pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew== + dependencies: + clone-response "^1.0.2" + get-stream "^5.1.0" + http-cache-semantics "^4.0.0" + keyv "^4.0.0" + lowercase-keys "^2.0.0" + normalize-url "^6.0.1" + responselike "^2.0.0" + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.npmmirror.com/call-bind/-/call-bind-1.0.2.tgz" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.npmmirror.com/callsites/-/callsites-3.1.0.tgz" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camel-case@^4.1.2: + version "4.1.2" + resolved "https://registry.npmmirror.com/camel-case/-/camel-case-4.1.2.tgz" + integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== + dependencies: + pascal-case "^3.1.2" + tslib "^2.0.3" + +camelcase-css@2.0.1: + version "2.0.1" + resolved "https://registry.npmmirror.com/camelcase-css/-/camelcase-css-2.0.1.tgz" + integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== + +camelcase@^6.2.0: + version "6.3.0" + resolved "https://registry.npmmirror.com/camelcase/-/camelcase-6.3.0.tgz" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +caniuse-api@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/caniuse-api/-/caniuse-api-3.0.0.tgz" + integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== + dependencies: + browserslist "^4.0.0" + caniuse-lite "^1.0.0" + lodash.memoize "^4.1.2" + lodash.uniq "^4.5.0" + +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001426, caniuse-lite@^1.0.30001646: + version "1.0.30001651" + resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001651.tgz" + integrity sha512-9Cf+Xv1jJNe1xPZLGuUXLNkE1BoDkqRqYyFJ9TDYSqhduqA4hu4oR9HluGoWYQC/aj8WHjsGVV+bwkh0+tegRg== + +ccount@^1.0.0, ccount@^1.0.3: + version "1.1.0" + resolved "https://registry.npmmirror.com/ccount/-/ccount-1.1.0.tgz" + integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg== + +chalk@^2.4.1, chalk@^2.4.2: + version "2.4.2" + resolved "https://registry.npmmirror.com/chalk/-/chalk-2.4.2.tgz" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^4.1.0, chalk@^4.1.2: + version "4.1.2" + resolved 
"https://registry.npmmirror.com/chalk/-/chalk-4.1.2.tgz" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +character-entities-legacy@^1.0.0: + version "1.1.4" + resolved "https://registry.npmmirror.com/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz" + integrity sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA== + +character-entities@^1.0.0: + version "1.2.4" + resolved "https://registry.npmmirror.com/character-entities/-/character-entities-1.2.4.tgz" + integrity sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw== + +character-reference-invalid@^1.0.0: + version "1.1.4" + resolved "https://registry.npmmirror.com/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz" + integrity sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg== + +cheerio-select@^2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/cheerio-select/-/cheerio-select-2.1.0.tgz" + integrity sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g== + dependencies: + boolbase "^1.0.0" + css-select "^5.1.0" + css-what "^6.1.0" + domelementtype "^2.3.0" + domhandler "^5.0.3" + domutils "^3.0.1" + +cheerio@^1.0.0-rc.3: + version "1.0.0-rc.12" + resolved "https://registry.npmmirror.com/cheerio/-/cheerio-1.0.0-rc.12.tgz" + integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q== + dependencies: + cheerio-select "^2.1.0" + dom-serializer "^2.0.0" + domhandler "^5.0.3" + domutils "^3.0.1" + htmlparser2 "^8.0.1" + parse5 "^7.0.0" + parse5-htmlparser2-tree-adapter "^7.0.0" + +chokidar@^3.4.2, chokidar@^3.5.2, chokidar@^3.5.3: + version "3.5.3" + resolved "https://registry.npmmirror.com/chokidar/-/chokidar-3.5.3.tgz" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +chrome-trace-event@^1.0.2: + version "1.0.3" + resolved "https://registry.npmmirror.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz" + integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== + +ci-info@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/ci-info/-/ci-info-2.0.0.tgz" + integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== + +ci-info@^3.1.1: + version "3.5.0" + resolved "https://registry.npmmirror.com/ci-info/-/ci-info-3.5.0.tgz" + integrity sha512-yH4RezKOGlOhxkmhbeNuC4eYZKAUsEaGtBuBzDDP1eFUKiccDWzBABxBfOx31IDwDIXMTxWuwAxUGModvkbuVw== + +clean-css@^5.1.5, clean-css@^5.2.2: + version "5.3.1" + resolved "https://registry.npmmirror.com/clean-css/-/clean-css-5.3.1.tgz" + integrity sha512-lCr8OHhiWCTw4v8POJovCoh4T7I9U11yVsPjMWWnnMmp9ZowCxyad1Pathle/9HjaDp+fdQKjO9fQydE6RHTZg== + dependencies: + source-map "~0.6.0" + +clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.npmmirror.com/clean-stack/-/clean-stack-2.2.0.tgz" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== + +cli-boxes@^2.2.1: + version "2.2.1" + resolved 
"https://registry.npmmirror.com/cli-boxes/-/cli-boxes-2.2.1.tgz" + integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw== + +clone-deep@^4.0.1: + version "4.0.1" + resolved "https://registry.npmmirror.com/clone-deep/-/clone-deep-4.0.1.tgz" + integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== + dependencies: + is-plain-object "^2.0.4" + kind-of "^6.0.2" + shallow-clone "^3.0.0" + +clone-response@^1.0.2: + version "1.0.3" + resolved "https://registry.npmmirror.com/clone-response/-/clone-response-1.0.3.tgz" + integrity sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA== + dependencies: + mimic-response "^1.0.0" + +clsx@^1.1.1: + version "1.2.1" + resolved "https://registry.npmmirror.com/clsx/-/clsx-1.2.1.tgz" + integrity sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg== + +coa@^2.0.2: + version "2.0.2" + resolved "https://registry.npmmirror.com/coa/-/coa-2.0.2.tgz" + integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== + dependencies: + "@types/q" "^1.5.1" + chalk "^2.4.1" + q "^1.1.2" + +collapse-white-space@^1.0.2: + version "1.0.6" + resolved "https://registry.npmmirror.com/collapse-white-space/-/collapse-white-space-1.0.6.tgz" + integrity sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ== + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.npmmirror.com/color-convert/-/color-convert-1.9.3.tgz" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.npmmirror.com/color-convert/-/color-convert-2.0.1.tgz" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.npmmirror.com/color-name/-/color-name-1.1.3.tgz" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.npmmirror.com/color-name/-/color-name-1.1.4.tgz" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +colord@^2.9.1: + version "2.9.3" + resolved "https://registry.npmmirror.com/colord/-/colord-2.9.3.tgz" + integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== + +colorette@^2.0.10: + version "2.0.19" + resolved "https://registry.npmmirror.com/colorette/-/colorette-2.0.19.tgz" + integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== + +combine-promises@^1.1.0: + version "1.1.0" + resolved "https://registry.npmmirror.com/combine-promises/-/combine-promises-1.1.0.tgz" + integrity sha512-ZI9jvcLDxqwaXEixOhArm3r7ReIivsXkpbyEWyeOhzz1QS0iSgBPnWvEqvIQtYyamGCYA88gFhmUrs9hrrQ0pg== + +comma-separated-tokens@^1.0.0: + version "1.0.8" + resolved "https://registry.npmmirror.com/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz" + integrity sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw== + +commander@^2.19.0, commander@^2.20.0: + version "2.20.3" + resolved 
"https://registry.npmmirror.com/commander/-/commander-2.20.3.tgz" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/commander/-/commander-5.1.0.tgz" + integrity sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg== + +commander@^7.2.0: + version "7.2.0" + resolved "https://registry.npmmirror.com/commander/-/commander-7.2.0.tgz" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + +commander@^8.3.0: + version "8.3.0" + resolved "https://registry.npmmirror.com/commander/-/commander-8.3.0.tgz" + integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== + +commondir@^1.0.1: + version "1.0.1" + resolved "https://registry.npmmirror.com/commondir/-/commondir-1.0.1.tgz" + integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== + +compressible@~2.0.16: + version "2.0.18" + resolved "https://registry.npmmirror.com/compressible/-/compressible-2.0.18.tgz" + integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== + dependencies: + mime-db ">= 1.43.0 < 2" + +compression@^1.7.4: + version "1.7.4" + resolved "https://registry.npmmirror.com/compression/-/compression-1.7.4.tgz" + integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== + dependencies: + accepts "~1.3.5" + bytes "3.0.0" + compressible "~2.0.16" + debug "2.6.9" + on-headers "~1.0.2" + safe-buffer "5.1.2" + vary "~1.1.2" + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.npmmirror.com/concat-map/-/concat-map-0.0.1.tgz" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +configstore@^5.0.1: + version "5.0.1" + resolved "https://registry.npmmirror.com/configstore/-/configstore-5.0.1.tgz" + integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA== + dependencies: + dot-prop "^5.2.0" + graceful-fs "^4.1.2" + make-dir "^3.0.0" + unique-string "^2.0.0" + write-file-atomic "^3.0.0" + xdg-basedir "^4.0.0" + +connect-history-api-fallback@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz" + integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== + +consola@^2.15.3: + version "2.15.3" + resolved "https://registry.npmmirror.com/consola/-/consola-2.15.3.tgz" + integrity sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw== + +content-disposition@0.5.2: + version "0.5.2" + resolved "https://registry.npmmirror.com/content-disposition/-/content-disposition-0.5.2.tgz" + integrity sha512-kRGRZw3bLlFISDBgwTSA1TMBFN6J6GWDeubmDE3AF+3+yXL8hTWv8r5rkLbqYXY4RjPk/EzHnClI3zQf1cFmHA== + +content-disposition@0.5.4: + version "0.5.4" + resolved "https://registry.npmmirror.com/content-disposition/-/content-disposition-0.5.4.tgz" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + +content-type@~1.0.4: + version "1.0.4" + resolved "https://registry.npmmirror.com/content-type/-/content-type-1.0.4.tgz" + integrity 
sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + +convert-source-map@^1.7.0: + version "1.9.0" + resolved "https://registry.npmmirror.com/convert-source-map/-/convert-source-map-1.9.0.tgz" + integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.npmmirror.com/cookie-signature/-/cookie-signature-1.0.6.tgz" + integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== + +cookie@0.5.0: + version "0.5.0" + resolved "https://registry.npmmirror.com/cookie/-/cookie-0.5.0.tgz" + integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== + +copy-text-to-clipboard@^3.0.1: + version "3.0.1" + resolved "https://registry.npmmirror.com/copy-text-to-clipboard/-/copy-text-to-clipboard-3.0.1.tgz" + integrity sha512-rvVsHrpFcL4F2P8ihsoLdFHmd404+CMg71S756oRSeQgqk51U3kicGdnvfkrxva0xXH92SjGS62B0XIJsbh+9Q== + +copy-webpack-plugin@^9.0.1: + version "9.1.0" + resolved "https://registry.npmmirror.com/copy-webpack-plugin/-/copy-webpack-plugin-9.1.0.tgz" + integrity sha512-rxnR7PaGigJzhqETHGmAcxKnLZSR5u1Y3/bcIv/1FnqXedcL/E2ewK7ZCNrArJKCiSv8yVXhTqetJh8inDvfsA== + dependencies: + fast-glob "^3.2.7" + glob-parent "^6.0.1" + globby "^11.0.3" + normalize-path "^3.0.0" + schema-utils "^3.1.1" + serialize-javascript "^6.0.0" + +core-js-compat@^3.25.1: + version "3.26.0" + resolved "https://registry.npmmirror.com/core-js-compat/-/core-js-compat-3.26.0.tgz" + integrity sha512-piOX9Go+Z4f9ZiBFLnZ5VrOpBl0h7IGCkiFUN11QTe6LjAvOT3ifL/5TdoizMh99hcGy5SoLyWbapIY/PIb/3A== + dependencies: + browserslist "^4.21.4" + +core-js-pure@^3.25.1: + version "3.26.0" + resolved "https://registry.npmmirror.com/core-js-pure/-/core-js-pure-3.26.0.tgz" + integrity sha512-LiN6fylpVBVwT8twhhluD9TzXmZQQsr2I2eIKtWNbZI1XMfBT7CV18itaN6RA7EtQd/SDdRx/wzvAShX2HvhQA== + +core-js@^3.18.0: + version "3.26.0" + resolved "https://registry.npmmirror.com/core-js/-/core-js-3.26.0.tgz" + integrity sha512-+DkDrhoR4Y0PxDz6rurahuB+I45OsEUv8E1maPTB6OuHRohMMcznBq9TMpdpDMm/hUPob/mJJS3PqgbHpMTQgw== + +core-util-is@~1.0.0: + version "1.0.3" + resolved "https://registry.npmmirror.com/core-util-is/-/core-util-is-1.0.3.tgz" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +cosmiconfig@^6.0.0: + version "6.0.0" + resolved "https://registry.npmmirror.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz" + integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.1.0" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.7.2" + +cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: + version "7.0.1" + resolved "https://registry.npmmirror.com/cosmiconfig/-/cosmiconfig-7.0.1.tgz" + integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.2.1" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.10.0" + +cross-fetch@^3.1.5: + version "3.1.5" + resolved "https://registry.npmmirror.com/cross-fetch/-/cross-fetch-3.1.5.tgz" + integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== + dependencies: + node-fetch "2.6.7" + +cross-spawn@^7.0.3: + version "7.0.3" + resolved 
"https://registry.npmmirror.com/cross-spawn/-/cross-spawn-7.0.3.tgz" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +crypto-random-string@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz" + integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== + +css-declaration-sorter@^6.3.1: + version "6.3.1" + resolved "https://registry.npmmirror.com/css-declaration-sorter/-/css-declaration-sorter-6.3.1.tgz" + integrity sha512-fBffmak0bPAnyqc/HO8C3n2sHrp9wcqQz6ES9koRF2/mLOVAx9zIQ3Y7R29sYCteTPqMCwns4WYQoCX91Xl3+w== + +css-loader@^5.1.1: + version "5.2.7" + resolved "https://registry.npmmirror.com/css-loader/-/css-loader-5.2.7.tgz" + integrity sha512-Q7mOvpBNBG7YrVGMxRxcBJZFL75o+cH2abNASdibkj/fffYD8qWbInZrD0S9ccI6vZclF3DsHE7njGlLtaHbhg== + dependencies: + icss-utils "^5.1.0" + loader-utils "^2.0.0" + postcss "^8.2.15" + postcss-modules-extract-imports "^3.0.0" + postcss-modules-local-by-default "^4.0.0" + postcss-modules-scope "^3.0.0" + postcss-modules-values "^4.0.0" + postcss-value-parser "^4.1.0" + schema-utils "^3.0.0" + semver "^7.3.5" + +css-minimizer-webpack-plugin@^3.0.2: + version "3.4.1" + resolved "https://registry.npmmirror.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz" + integrity sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q== + dependencies: + cssnano "^5.0.6" + jest-worker "^27.0.2" + postcss "^8.3.5" + schema-utils "^4.0.0" + serialize-javascript "^6.0.0" + source-map "^0.6.1" + +css-select-base-adapter@^0.1.1: + version "0.1.1" + resolved "https://registry.npmmirror.com/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz" + integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== + +css-select@^2.0.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/css-select/-/css-select-2.1.0.tgz" + integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== + dependencies: + boolbase "^1.0.0" + css-what "^3.2.1" + domutils "^1.7.0" + nth-check "^1.0.2" + +css-select@^4.1.3: + version "4.3.0" + resolved "https://registry.npmmirror.com/css-select/-/css-select-4.3.0.tgz" + integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== + dependencies: + boolbase "^1.0.0" + css-what "^6.0.1" + domhandler "^4.3.1" + domutils "^2.8.0" + nth-check "^2.0.1" + +css-select@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/css-select/-/css-select-5.1.0.tgz" + integrity sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg== + dependencies: + boolbase "^1.0.0" + css-what "^6.1.0" + domhandler "^5.0.2" + domutils "^3.0.1" + nth-check "^2.0.1" + +css-tree@1.0.0-alpha.37: + version "1.0.0-alpha.37" + resolved "https://registry.npmmirror.com/css-tree/-/css-tree-1.0.0-alpha.37.tgz" + integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== + dependencies: + mdn-data "2.0.4" + source-map "^0.6.1" + +css-tree@^1.1.2, css-tree@^1.1.3: + version "1.1.3" + resolved "https://registry.npmmirror.com/css-tree/-/css-tree-1.1.3.tgz" + integrity 
sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== + dependencies: + mdn-data "2.0.14" + source-map "^0.6.1" + +css-what@^3.2.1: + version "3.4.2" + resolved "https://registry.npmmirror.com/css-what/-/css-what-3.4.2.tgz" + integrity sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== + +css-what@^6.0.1, css-what@^6.1.0: + version "6.1.0" + resolved "https://registry.npmmirror.com/css-what/-/css-what-6.1.0.tgz" + integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== + +cssesc@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/cssesc/-/cssesc-3.0.0.tgz" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +cssnano-preset-advanced@^5.1.4: + version "5.3.9" + resolved "https://registry.npmmirror.com/cssnano-preset-advanced/-/cssnano-preset-advanced-5.3.9.tgz" + integrity sha512-njnh4pp1xCsibJcEHnWZb4EEzni0ePMqPuPNyuWT4Z+YeXmsgqNuTPIljXFEXhxGsWs9183JkXgHxc1TcsahIg== + dependencies: + autoprefixer "^10.4.12" + cssnano-preset-default "^5.2.13" + postcss-discard-unused "^5.1.0" + postcss-merge-idents "^5.1.1" + postcss-reduce-idents "^5.2.0" + postcss-zindex "^5.1.0" + +cssnano-preset-default@^5.2.13: + version "5.2.13" + resolved "https://registry.npmmirror.com/cssnano-preset-default/-/cssnano-preset-default-5.2.13.tgz" + integrity sha512-PX7sQ4Pb+UtOWuz8A1d+Rbi+WimBIxJTRyBdgGp1J75VU0r/HFQeLnMYgHiCAp6AR4rqrc7Y4R+1Rjk3KJz6DQ== + dependencies: + css-declaration-sorter "^6.3.1" + cssnano-utils "^3.1.0" + postcss-calc "^8.2.3" + postcss-colormin "^5.3.0" + postcss-convert-values "^5.1.3" + postcss-discard-comments "^5.1.2" + postcss-discard-duplicates "^5.1.0" + postcss-discard-empty "^5.1.1" + postcss-discard-overridden "^5.1.0" + postcss-merge-longhand "^5.1.7" + postcss-merge-rules "^5.1.3" + postcss-minify-font-values "^5.1.0" + postcss-minify-gradients "^5.1.1" + postcss-minify-params "^5.1.4" + postcss-minify-selectors "^5.2.1" + postcss-normalize-charset "^5.1.0" + postcss-normalize-display-values "^5.1.0" + postcss-normalize-positions "^5.1.1" + postcss-normalize-repeat-style "^5.1.1" + postcss-normalize-string "^5.1.0" + postcss-normalize-timing-functions "^5.1.0" + postcss-normalize-unicode "^5.1.1" + postcss-normalize-url "^5.1.0" + postcss-normalize-whitespace "^5.1.1" + postcss-ordered-values "^5.1.3" + postcss-reduce-initial "^5.1.1" + postcss-reduce-transforms "^5.1.0" + postcss-svgo "^5.1.0" + postcss-unique-selectors "^5.1.1" + +cssnano-utils@^3.1.0: + version "3.1.0" + resolved "https://registry.npmmirror.com/cssnano-utils/-/cssnano-utils-3.1.0.tgz" + integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== + +cssnano@^5.0.6, cssnano@^5.0.8: + version "5.1.14" + resolved "https://registry.npmmirror.com/cssnano/-/cssnano-5.1.14.tgz" + integrity sha512-Oou7ihiTocbKqi0J1bB+TRJIQX5RMR3JghA8hcWSw9mjBLQ5Y3RWqEDoYG3sRNlAbCIXpqMoZGbq5KDR3vdzgw== + dependencies: + cssnano-preset-default "^5.2.13" + lilconfig "^2.0.3" + yaml "^1.10.2" + +csso@^4.0.2, csso@^4.2.0: + version "4.2.0" + resolved "https://registry.npmmirror.com/csso/-/csso-4.2.0.tgz" + integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== + dependencies: + css-tree "^1.1.2" + +csstype@^3.0.2: + version "3.1.1" + resolved "https://registry.npmmirror.com/csstype/-/csstype-3.1.1.tgz" + integrity 
sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw== + +debug@2.6.9, debug@^2.6.0: + version "2.6.9" + resolved "https://registry.npmmirror.com/debug/-/debug-2.6.9.tgz" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.2.0: + version "4.3.4" + resolved "https://registry.npmmirror.com/debug/-/debug-4.3.4.tgz" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +decompress-response@^6.0.0: + version "6.0.0" + resolved "https://registry.npmmirror.com/decompress-response/-/decompress-response-6.0.0.tgz" + integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ== + dependencies: + mimic-response "^3.1.0" + +deep-extend@^0.6.0: + version "0.6.0" + resolved "https://registry.npmmirror.com/deep-extend/-/deep-extend-0.6.0.tgz" + integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== + +deepmerge@^4.2.2: + version "4.2.2" + resolved "https://registry.npmmirror.com/deepmerge/-/deepmerge-4.2.2.tgz" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +default-gateway@^6.0.3: + version "6.0.3" + resolved "https://registry.npmmirror.com/default-gateway/-/default-gateway-6.0.3.tgz" + integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== + dependencies: + execa "^5.0.0" + +defer-to-connect@^2.0.0: + version "2.0.1" + resolved "https://registry.npmmirror.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz" + integrity sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg== + +define-lazy-prop@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== + +define-properties@^1.1.3, define-properties@^1.1.4: + version "1.1.4" + resolved "https://registry.npmmirror.com/define-properties/-/define-properties-1.1.4.tgz" + integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +del@^6.0.0: + version "6.1.1" + resolved "https://registry.npmmirror.com/del/-/del-6.1.1.tgz" + integrity sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg== + dependencies: + globby "^11.0.1" + graceful-fs "^4.2.4" + is-glob "^4.0.1" + is-path-cwd "^2.2.0" + is-path-inside "^3.0.2" + p-map "^4.0.0" + rimraf "^3.0.2" + slash "^3.0.0" + +depd@2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/depd/-/depd-2.0.0.tgz" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + +depd@~1.1.2: + version "1.1.2" + resolved "https://registry.npmmirror.com/depd/-/depd-1.1.2.tgz" + integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== + +destroy@1.2.0: + version "1.2.0" + resolved "https://registry.npmmirror.com/destroy/-/destroy-1.2.0.tgz" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + +detab@2.0.4: + version "2.0.4" + resolved 
"https://registry.npmmirror.com/detab/-/detab-2.0.4.tgz" + integrity sha512-8zdsQA5bIkoRECvCrNKPla84lyoR7DSAyf7p0YgXzBO9PDJx8KntPUay7NS6yp+KdxdVtiE5SpHKtbp2ZQyA9g== + dependencies: + repeat-string "^1.5.4" + +detect-node@^2.0.4: + version "2.1.0" + resolved "https://registry.npmmirror.com/detect-node/-/detect-node-2.1.0.tgz" + integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== + +detect-port-alt@^1.1.6: + version "1.1.6" + resolved "https://registry.npmmirror.com/detect-port-alt/-/detect-port-alt-1.1.6.tgz" + integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== + dependencies: + address "^1.0.1" + debug "^2.6.0" + +detect-port@^1.3.0: + version "1.5.1" + resolved "https://registry.npmmirror.com/detect-port/-/detect-port-1.5.1.tgz" + integrity sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ== + dependencies: + address "^1.0.1" + debug "4" + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.npmmirror.com/dir-glob/-/dir-glob-3.0.1.tgz" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +dns-equal@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/dns-equal/-/dns-equal-1.0.0.tgz" + integrity sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg== + +dns-packet@^5.2.2: + version "5.4.0" + resolved "https://registry.npmmirror.com/dns-packet/-/dns-packet-5.4.0.tgz" + integrity sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g== + dependencies: + "@leichtgewicht/ip-codec" "^2.0.1" + +dom-converter@^0.2.0: + version "0.2.0" + resolved "https://registry.npmmirror.com/dom-converter/-/dom-converter-0.2.0.tgz" + integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== + dependencies: + utila "~0.4" + +dom-serializer@0: + version "0.2.2" + resolved "https://registry.npmmirror.com/dom-serializer/-/dom-serializer-0.2.2.tgz" + integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== + dependencies: + domelementtype "^2.0.1" + entities "^2.0.0" + +dom-serializer@^1.0.1: + version "1.4.1" + resolved "https://registry.npmmirror.com/dom-serializer/-/dom-serializer-1.4.1.tgz" + integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.2.0" + entities "^2.0.0" + +dom-serializer@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/dom-serializer/-/dom-serializer-2.0.0.tgz" + integrity sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.2" + entities "^4.2.0" + +domelementtype@1: + version "1.3.1" + resolved "https://registry.npmmirror.com/domelementtype/-/domelementtype-1.3.1.tgz" + integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== + +domelementtype@^2.0.1, domelementtype@^2.2.0, domelementtype@^2.3.0: + version "2.3.0" + resolved "https://registry.npmmirror.com/domelementtype/-/domelementtype-2.3.0.tgz" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== + +domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: + version 
"4.3.1" + resolved "https://registry.npmmirror.com/domhandler/-/domhandler-4.3.1.tgz" + integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== + dependencies: + domelementtype "^2.2.0" + +domhandler@^5.0.1, domhandler@^5.0.2, domhandler@^5.0.3: + version "5.0.3" + resolved "https://registry.npmmirror.com/domhandler/-/domhandler-5.0.3.tgz" + integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w== + dependencies: + domelementtype "^2.3.0" + +domutils@^1.7.0: + version "1.7.0" + resolved "https://registry.npmmirror.com/domutils/-/domutils-1.7.0.tgz" + integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== + dependencies: + dom-serializer "0" + domelementtype "1" + +domutils@^2.5.2, domutils@^2.8.0: + version "2.8.0" + resolved "https://registry.npmmirror.com/domutils/-/domutils-2.8.0.tgz" + integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== + dependencies: + dom-serializer "^1.0.1" + domelementtype "^2.2.0" + domhandler "^4.2.0" + +domutils@^3.0.1: + version "3.0.1" + resolved "https://registry.npmmirror.com/domutils/-/domutils-3.0.1.tgz" + integrity sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q== + dependencies: + dom-serializer "^2.0.0" + domelementtype "^2.3.0" + domhandler "^5.0.1" + +dot-case@^3.0.4: + version "3.0.4" + resolved "https://registry.npmmirror.com/dot-case/-/dot-case-3.0.4.tgz" + integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +dot-prop@^5.2.0: + version "5.3.0" + resolved "https://registry.npmmirror.com/dot-prop/-/dot-prop-5.3.0.tgz" + integrity sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q== + dependencies: + is-obj "^2.0.0" + +duplexer@^0.1.1, duplexer@^0.1.2: + version "0.1.2" + resolved "https://registry.npmmirror.com/duplexer/-/duplexer-0.1.2.tgz" + integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.npmmirror.com/ee-first/-/ee-first-1.1.1.tgz" + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + +electron-to-chromium@^1.5.4: + version "1.5.6" + resolved "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.6.tgz" + integrity sha512-jwXWsM5RPf6j9dPYzaorcBSUg6AiqocPEyMpkchkvntaH9HGfOOMZwxMJjDY/XEs3T5dM7uyH1VhRMkqUU9qVw== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.npmmirror.com/emoji-regex/-/emoji-regex-8.0.0.tgz" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emojis-list@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/emojis-list/-/emojis-list-3.0.0.tgz" + integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== + +emoticon@^3.2.0: + version "3.2.0" + resolved "https://registry.npmmirror.com/emoticon/-/emoticon-3.2.0.tgz" + integrity sha512-SNujglcLTTg+lDAcApPNgEdudaqQFiAbJCqzjNxJkvN9vAwCGi0uu8IUVvx+f16h+V44KCY6Y2yboroc9pilHg== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "https://registry.npmmirror.com/encodeurl/-/encodeurl-1.0.2.tgz" + integrity 
sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + +end-of-stream@^1.1.0: + version "1.4.4" + resolved "https://registry.npmmirror.com/end-of-stream/-/end-of-stream-1.4.4.tgz" + integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== + dependencies: + once "^1.4.0" + +enhanced-resolve@^5.17.0: + version "5.17.1" + resolved "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz" + integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +entities@^2.0.0: + version "2.2.0" + resolved "https://registry.npmmirror.com/entities/-/entities-2.2.0.tgz" + integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== + +entities@^4.2.0, entities@^4.3.0, entities@^4.4.0: + version "4.4.0" + resolved "https://registry.npmmirror.com/entities/-/entities-4.4.0.tgz" + integrity sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA== + +error-ex@^1.3.1: + version "1.3.2" + resolved "https://registry.npmmirror.com/error-ex/-/error-ex-1.3.2.tgz" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.5, es-abstract@^1.20.1, es-abstract@^1.20.4: + version "1.20.4" + resolved "https://registry.npmmirror.com/es-abstract/-/es-abstract-1.20.4.tgz" + integrity sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + function.prototype.name "^1.1.5" + get-intrinsic "^1.1.3" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + is-callable "^1.2.7" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-weakref "^1.0.2" + object-inspect "^1.12.2" + object-keys "^1.1.1" + object.assign "^4.1.4" + regexp.prototype.flags "^1.4.3" + safe-regex-test "^1.0.0" + string.prototype.trimend "^1.0.5" + string.prototype.trimstart "^1.0.5" + unbox-primitive "^1.0.2" + +es-array-method-boxes-properly@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz" + integrity sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA== + +es-module-lexer@^1.2.1: + version "1.5.4" + resolved "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.5.4.tgz" + integrity sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw== + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.npmmirror.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escalade@^3.1.2: + version "3.1.2" + resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz" + integrity sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA== + +escape-goat@^2.0.0: + version "2.1.1" + resolved 
"https://registry.npmmirror.com/escape-goat/-/escape-goat-2.1.1.tgz" + integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q== + +escape-html@^1.0.3, escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.npmmirror.com/escape-html/-/escape-html-1.0.3.tgz" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.npmmirror.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +eslint-scope@5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/eslint-scope/-/eslint-scope-5.1.1.tgz" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +esprima@^4.0.0: + version "4.0.1" + resolved "https://registry.npmmirror.com/esprima/-/esprima-4.0.1.tgz" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.npmmirror.com/esrecurse/-/esrecurse-4.3.0.tgz" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.npmmirror.com/estraverse/-/estraverse-4.3.0.tgz" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.2.0: + version "5.3.0" + resolved "https://registry.npmmirror.com/estraverse/-/estraverse-5.3.0.tgz" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.npmmirror.com/esutils/-/esutils-2.0.3.tgz" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +eta@2.0.0, eta@^1.12.3: + version "2.0.0" + resolved "https://registry.yarnpkg.com/eta/-/eta-2.0.0.tgz#376865fadebc899e5b6dfce82fae64cbbe47e594" + integrity sha512-NqE7S2VmVwgMS8yBxsH4VgNQjNjLq1gfGU0u9I6Cjh468nPRMoDfGdK9n1p/3Dvsw3ebklDkZsFAnKJ9sefjBA== + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.npmmirror.com/etag/-/etag-1.8.1.tgz" + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== + +eval@^0.1.8: + version "0.1.8" + resolved "https://registry.npmmirror.com/eval/-/eval-0.1.8.tgz" + integrity sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw== + dependencies: + "@types/node" "*" + require-like ">= 0.1.1" + +eventemitter3@^4.0.0: + version "4.0.7" + resolved "https://registry.npmmirror.com/eventemitter3/-/eventemitter3-4.0.7.tgz" + integrity 
sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== + +events@^3.2.0: + version "3.3.0" + resolved "https://registry.npmmirror.com/events/-/events-3.3.0.tgz" + integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.npmmirror.com/execa/-/execa-5.1.1.tgz" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +express@^4.17.3: + version "4.18.2" + resolved "https://registry.npmmirror.com/express/-/express-4.18.2.tgz" + integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.1" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.5.0" + cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.2.0" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.7" + qs "6.11.0" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.18.0" + serve-static "1.15.0" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +extend-shallow@^2.0.1: + version "2.0.1" + resolved "https://registry.npmmirror.com/extend-shallow/-/extend-shallow-2.0.1.tgz" + integrity sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug== + dependencies: + is-extendable "^0.1.0" + +extend@^3.0.0: + version "3.0.2" + resolved "https://registry.npmmirror.com/extend/-/extend-3.0.2.tgz" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.npmmirror.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.7, fast-glob@^3.2.9: + version "3.2.12" + resolved "https://registry.npmmirror.com/fast-glob/-/fast-glob-3.2.12.tgz" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-url-parser@1.1.3: + version "1.1.3" + resolved "https://registry.npmmirror.com/fast-url-parser/-/fast-url-parser-1.1.3.tgz" + integrity sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ== + dependencies: + punycode "^1.3.2" + +fastq@^1.6.0: + version "1.13.0" + resolved "https://registry.npmmirror.com/fastq/-/fastq-1.13.0.tgz" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + 
dependencies: + reusify "^1.0.4" + +faye-websocket@^0.11.3: + version "0.11.4" + resolved "https://registry.npmmirror.com/faye-websocket/-/faye-websocket-0.11.4.tgz" + integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== + dependencies: + websocket-driver ">=0.5.1" + +fbemitter@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/fbemitter/-/fbemitter-3.0.0.tgz" + integrity sha512-KWKaceCwKQU0+HPoop6gn4eOHk50bBv/VxjJtGMfwmJt3D29JpN4H4eisCtIPA+a8GVBam+ldMMpMjJUvpDyHw== + dependencies: + fbjs "^3.0.0" + +fbjs-css-vars@^1.0.0: + version "1.0.2" + resolved "https://registry.npmmirror.com/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz" + integrity sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ== + +fbjs@^3.0.0, fbjs@^3.0.1: + version "3.0.4" + resolved "https://registry.npmmirror.com/fbjs/-/fbjs-3.0.4.tgz" + integrity sha512-ucV0tDODnGV3JCnnkmoszb5lf4bNpzjv80K41wd4k798Etq+UYD0y0TIfalLjZoKgjive6/adkRnszwapiDgBQ== + dependencies: + cross-fetch "^3.1.5" + fbjs-css-vars "^1.0.0" + loose-envify "^1.0.0" + object-assign "^4.1.0" + promise "^7.1.1" + setimmediate "^1.0.5" + ua-parser-js "^0.7.30" + +feed@^4.2.2: + version "4.2.2" + resolved "https://registry.npmmirror.com/feed/-/feed-4.2.2.tgz" + integrity sha512-u5/sxGfiMfZNtJ3OvQpXcvotFpYkL0n9u9mM2vkui2nGo8b4wvDkJ8gAkYqbA8QpGyFCv3RK0Z+Iv+9veCS9bQ== + dependencies: + xml-js "^1.6.11" + +file-loader@^6.2.0: + version "6.2.0" + resolved "https://registry.npmmirror.com/file-loader/-/file-loader-6.2.0.tgz" + integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + +filesize@^6.1.0: + version "6.4.0" + resolved "https://registry.npmmirror.com/filesize/-/filesize-6.4.0.tgz" + integrity sha512-mjFIpOHC4jbfcTfoh4rkWpI31mF7viw9ikj/JyLoKzqlwG/YsefKfvYlYhdYdg/9mtK2z1AzgN/0LvVQ3zdlSQ== + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.npmmirror.com/fill-range/-/fill-range-7.0.1.tgz" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +finalhandler@1.2.0: + version "1.2.0" + resolved "https://registry.npmmirror.com/finalhandler/-/finalhandler-1.2.0.tgz" + integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== + dependencies: + debug "2.6.9" + encodeurl "~1.0.2" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +find-cache-dir@^3.3.1: + version "3.3.2" + resolved "https://registry.npmmirror.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz" + integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== + dependencies: + commondir "^1.0.1" + make-dir "^3.0.2" + pkg-dir "^4.1.0" + +find-up@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/find-up/-/find-up-3.0.0.tgz" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.npmmirror.com/find-up/-/find-up-4.1.0.tgz" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +find-up@^5.0.0: + version "5.0.0" + resolved 
"https://registry.npmmirror.com/find-up/-/find-up-5.0.0.tgz" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + +flux@^4.0.1: + version "4.0.3" + resolved "https://registry.npmmirror.com/flux/-/flux-4.0.3.tgz" + integrity sha512-yKAbrp7JhZhj6uiT1FTuVMlIAT1J4jqEyBpFApi1kxpGZCvacMVc/t1pMQyotqHhAgvoE3bNvAykhCo2CLjnYw== + dependencies: + fbemitter "^3.0.0" + fbjs "^3.0.1" + +follow-redirects@^1.0.0, follow-redirects@^1.14.7: + version "1.15.2" + resolved "https://registry.npmmirror.com/follow-redirects/-/follow-redirects-1.15.2.tgz" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== + +fork-ts-checker-webpack-plugin@^6.0.5: + version "6.5.2" + resolved "https://registry.npmmirror.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz" + integrity sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA== + dependencies: + "@babel/code-frame" "^7.8.3" + "@types/json-schema" "^7.0.5" + chalk "^4.1.0" + chokidar "^3.4.2" + cosmiconfig "^6.0.0" + deepmerge "^4.2.2" + fs-extra "^9.0.0" + glob "^7.1.6" + memfs "^3.1.2" + minimatch "^3.0.4" + schema-utils "2.7.0" + semver "^7.3.2" + tapable "^1.0.0" + +forwarded@0.2.0: + version "0.2.0" + resolved "https://registry.npmmirror.com/forwarded/-/forwarded-0.2.0.tgz" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fraction.js@^4.2.0: + version "4.2.0" + resolved "https://registry.npmmirror.com/fraction.js/-/fraction.js-4.2.0.tgz" + integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.npmmirror.com/fresh/-/fresh-0.5.2.tgz" + integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== + +fs-extra@^10.0.0, fs-extra@^10.1.0: + version "10.1.0" + resolved "https://registry.npmmirror.com/fs-extra/-/fs-extra-10.1.0.tgz" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-extra@^9.0.0, fs-extra@^9.0.1: + version "9.1.0" + resolved "https://registry.npmmirror.com/fs-extra/-/fs-extra-9.1.0.tgz" + integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-monkey@^1.0.3: + version "1.0.3" + resolved "https://registry.npmmirror.com/fs-monkey/-/fs-monkey-1.0.3.tgz" + integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/fs.realpath/-/fs.realpath-1.0.0.tgz" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fsevents@~2.3.2: + version "2.3.2" + resolved "https://registry.npmmirror.com/fsevents/-/fsevents-2.3.2.tgz" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.npmmirror.com/function-bind/-/function-bind-1.1.1.tgz" + integrity 
sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function.prototype.name@^1.1.5: + version "1.1.5" + resolved "https://registry.npmmirror.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz" + integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + functions-have-names "^1.2.2" + +functions-have-names@^1.2.2: + version "1.2.3" + resolved "https://registry.npmmirror.com/functions-have-names/-/functions-have-names-1.2.3.tgz" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +gensync@^1.0.0-beta.1, gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.npmmirror.com/gensync/-/gensync-1.0.0-beta.2.tgz" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: + version "1.1.3" + resolved "https://registry.npmmirror.com/get-intrinsic/-/get-intrinsic-1.1.3.tgz" + integrity sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + +get-own-enumerable-property-symbols@^3.0.0: + version "3.0.2" + resolved "https://registry.npmmirror.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz" + integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== + +get-stream@^5.1.0: + version "5.2.0" + resolved "https://registry.npmmirror.com/get-stream/-/get-stream-5.2.0.tgz" + integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== + dependencies: + pump "^3.0.0" + +get-stream@^6.0.0: + version "6.0.1" + resolved "https://registry.npmmirror.com/get-stream/-/get-stream-6.0.1.tgz" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz" + integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +github-slugger@^1.4.0: + version "1.5.0" + resolved "https://registry.npmmirror.com/github-slugger/-/github-slugger-1.5.0.tgz" + integrity sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw== + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "https://registry.npmmirror.com/glob-parent/-/glob-parent-5.1.2.tgz" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.1: + version "6.0.2" + resolved "https://registry.npmmirror.com/glob-parent/-/glob-parent-6.0.2.tgz" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob-to-regexp@^0.4.1: + version "0.4.1" + resolved "https://registry.npmmirror.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz" + integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== + +glob@^7.0.0, glob@^7.1.3, glob@^7.1.6: 
+ version "7.2.3" + resolved "https://registry.npmmirror.com/glob/-/glob-7.2.3.tgz" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +global-dirs@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/global-dirs/-/global-dirs-3.0.0.tgz" + integrity sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA== + dependencies: + ini "2.0.0" + +global-modules@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/global-modules/-/global-modules-2.0.0.tgz" + integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== + dependencies: + global-prefix "^3.0.0" + +global-prefix@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/global-prefix/-/global-prefix-3.0.0.tgz" + integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== + dependencies: + ini "^1.3.5" + kind-of "^6.0.2" + which "^1.3.1" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.npmmirror.com/globals/-/globals-11.12.0.tgz" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globby@^11.0.1, globby@^11.0.2, globby@^11.0.3, globby@^11.0.4, globby@^11.1.0: + version "11.1.0" + resolved "https://registry.npmmirror.com/globby/-/globby-11.1.0.tgz" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +got@11.8.5, got@^9.6.0: + version "11.8.5" + resolved "https://registry.yarnpkg.com/got/-/got-11.8.5.tgz#ce77d045136de56e8f024bebb82ea349bc730046" + integrity sha512-o0Je4NvQObAuZPHLFoRSkdG2lTgtcynqymzg2Vupdx6PorhaT5MCbIyXG6d4D94kk8ZG57QeosgdiqfJWhEhlQ== + dependencies: + "@sindresorhus/is" "^4.0.0" + "@szmarczak/http-timer" "^4.0.5" + "@types/cacheable-request" "^6.0.1" + "@types/responselike" "^1.0.0" + cacheable-lookup "^5.0.3" + cacheable-request "^7.0.2" + decompress-response "^6.0.0" + http2-wrapper "^1.0.0-beta.5.2" + lowercase-keys "^2.0.0" + p-cancelable "^2.0.0" + responselike "^2.0.0" + +graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.11, graceful-fs@^4.2.4, graceful-fs@^4.2.6: + version "4.2.11" + resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + +gray-matter@^4.0.3: + version "4.0.3" + resolved "https://registry.npmmirror.com/gray-matter/-/gray-matter-4.0.3.tgz" + integrity sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q== + dependencies: + js-yaml "^3.13.1" + kind-of "^6.0.2" + section-matter "^1.0.0" + strip-bom-string "^1.0.0" + +gzip-size@^5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/gzip-size/-/gzip-size-5.1.1.tgz" + integrity sha512-FNHi6mmoHvs1mxZAds4PpdCS6QG8B4C1krxJsMutgxl5t3+GlRTzzI3NEkifXx2pVsOvJdOGSmIgDhQ55FwdPA== + dependencies: + duplexer "^0.1.1" + pify "^4.0.1" + +gzip-size@^6.0.0: + version "6.0.0" + resolved "https://registry.npmmirror.com/gzip-size/-/gzip-size-6.0.0.tgz" + integrity 
sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== + dependencies: + duplexer "^0.1.2" + +handle-thing@^2.0.0: + version "2.0.1" + resolved "https://registry.npmmirror.com/handle-thing/-/handle-thing-2.0.1.tgz" + integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== + +has-bigints@^1.0.1, has-bigints@^1.0.2: + version "1.0.2" + resolved "https://registry.npmmirror.com/has-bigints/-/has-bigints-1.0.2.tgz" + integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/has-flag/-/has-flag-3.0.0.tgz" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/has-flag/-/has-flag-4.0.0.tgz" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-property-descriptors@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + +has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.npmmirror.com/has-symbols/-/has-symbols-1.0.3.tgz" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has-yarn@^2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/has-yarn/-/has-yarn-2.1.0.tgz" + integrity sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw== + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.npmmirror.com/has/-/has-1.0.3.tgz" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hast-to-hyperscript@^9.0.0: + version "9.0.1" + resolved "https://registry.npmmirror.com/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz" + integrity sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA== + dependencies: + "@types/unist" "^2.0.3" + comma-separated-tokens "^1.0.0" + property-information "^5.3.0" + space-separated-tokens "^1.0.0" + style-to-object "^0.3.0" + unist-util-is "^4.0.0" + web-namespaces "^1.0.0" + +hast-util-from-parse5@^5.0.0: + version "5.0.3" + resolved "https://registry.npmmirror.com/hast-util-from-parse5/-/hast-util-from-parse5-5.0.3.tgz" + integrity sha512-gOc8UB99F6eWVWFtM9jUikjN7QkWxB3nY0df5Z0Zq1/Nkwl5V4hAAsl0tmwlgWl/1shlTF8DnNYLO8X6wRV9pA== + dependencies: + ccount "^1.0.3" + hastscript "^5.0.0" + property-information "^5.0.0" + web-namespaces "^1.1.2" + xtend "^4.0.1" + +hast-util-from-parse5@^6.0.0: + version "6.0.1" + resolved "https://registry.npmmirror.com/hast-util-from-parse5/-/hast-util-from-parse5-6.0.1.tgz" + integrity sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA== + dependencies: + "@types/parse5" "^5.0.0" + hastscript 
"^6.0.0" + property-information "^5.0.0" + vfile "^4.0.0" + vfile-location "^3.2.0" + web-namespaces "^1.0.0" + +hast-util-is-element@1.1.0, hast-util-is-element@^1.0.0: + version "1.1.0" + resolved "https://registry.npmmirror.com/hast-util-is-element/-/hast-util-is-element-1.1.0.tgz" + integrity sha512-oUmNua0bFbdrD/ELDSSEadRVtWZOf3iF6Lbv81naqsIV99RnSCieTbWuWCY8BAeEfKJTKl0gRdokv+dELutHGQ== + +hast-util-parse-selector@^2.0.0: + version "2.2.5" + resolved "https://registry.npmmirror.com/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz" + integrity sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ== + +hast-util-raw@6.0.1: + version "6.0.1" + resolved "https://registry.npmmirror.com/hast-util-raw/-/hast-util-raw-6.0.1.tgz" + integrity sha512-ZMuiYA+UF7BXBtsTBNcLBF5HzXzkyE6MLzJnL605LKE8GJylNjGc4jjxazAHUtcwT5/CEt6afRKViYB4X66dig== + dependencies: + "@types/hast" "^2.0.0" + hast-util-from-parse5 "^6.0.0" + hast-util-to-parse5 "^6.0.0" + html-void-elements "^1.0.0" + parse5 "^6.0.0" + unist-util-position "^3.0.0" + vfile "^4.0.0" + web-namespaces "^1.0.0" + xtend "^4.0.0" + zwitch "^1.0.0" + +hast-util-to-parse5@^6.0.0: + version "6.0.0" + resolved "https://registry.npmmirror.com/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz" + integrity sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ== + dependencies: + hast-to-hyperscript "^9.0.0" + property-information "^5.0.0" + web-namespaces "^1.0.0" + xtend "^4.0.0" + zwitch "^1.0.0" + +hast-util-to-text@^2.0.0: + version "2.0.1" + resolved "https://registry.npmmirror.com/hast-util-to-text/-/hast-util-to-text-2.0.1.tgz" + integrity sha512-8nsgCARfs6VkwH2jJU9b8LNTuR4700na+0h3PqCaEk4MAnMDeu5P0tP8mjk9LLNGxIeQRLbiDbZVw6rku+pYsQ== + dependencies: + hast-util-is-element "^1.0.0" + repeat-string "^1.0.0" + unist-util-find-after "^3.0.0" + +hastscript@^5.0.0: + version "5.1.2" + resolved "https://registry.npmmirror.com/hastscript/-/hastscript-5.1.2.tgz" + integrity sha512-WlztFuK+Lrvi3EggsqOkQ52rKbxkXL3RwB6t5lwoa8QLMemoWfBuL43eDrwOamJyR7uKQKdmKYaBH1NZBiIRrQ== + dependencies: + comma-separated-tokens "^1.0.0" + hast-util-parse-selector "^2.0.0" + property-information "^5.0.0" + space-separated-tokens "^1.0.0" + +hastscript@^6.0.0: + version "6.0.0" + resolved "https://registry.npmmirror.com/hastscript/-/hastscript-6.0.0.tgz" + integrity sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w== + dependencies: + "@types/hast" "^2.0.0" + comma-separated-tokens "^1.0.0" + hast-util-parse-selector "^2.0.0" + property-information "^5.0.0" + space-separated-tokens "^1.0.0" + +he@^1.2.0: + version "1.2.0" + resolved "https://registry.npmmirror.com/he/-/he-1.2.0.tgz" + integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== + +history@^4.9.0: + version "4.10.1" + resolved "https://registry.npmmirror.com/history/-/history-4.10.1.tgz" + integrity sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew== + dependencies: + "@babel/runtime" "^7.1.2" + loose-envify "^1.2.0" + resolve-pathname "^3.0.0" + tiny-invariant "^1.0.2" + tiny-warning "^1.0.0" + value-equal "^1.0.1" + +hoist-non-react-statics@^3.1.0: + version "3.3.2" + resolved "https://registry.npmmirror.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz" + integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== + 
dependencies: + react-is "^16.7.0" + +hpack.js@^2.1.6: + version "2.1.6" + resolved "https://registry.npmmirror.com/hpack.js/-/hpack.js-2.1.6.tgz" + integrity sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ== + dependencies: + inherits "^2.0.1" + obuf "^1.0.0" + readable-stream "^2.0.1" + wbuf "^1.1.0" + +html-entities@^2.3.2: + version "2.3.3" + resolved "https://registry.npmmirror.com/html-entities/-/html-entities-2.3.3.tgz" + integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA== + +html-minifier-terser@^6.0.2: + version "6.1.0" + resolved "https://registry.npmmirror.com/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz" + integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== + dependencies: + camel-case "^4.1.2" + clean-css "^5.2.2" + commander "^8.3.0" + he "^1.2.0" + param-case "^3.0.4" + relateurl "^0.2.7" + terser "^5.10.0" + +html-tags@^3.1.0: + version "3.2.0" + resolved "https://registry.npmmirror.com/html-tags/-/html-tags-3.2.0.tgz" + integrity sha512-vy7ClnArOZwCnqZgvv+ddgHgJiAFXe3Ge9ML5/mBctVJoUoYPCdxVucOywjDARn6CVoh3dRSFdPHy2sX80L0Wg== + +html-void-elements@^1.0.0: + version "1.0.5" + resolved "https://registry.npmmirror.com/html-void-elements/-/html-void-elements-1.0.5.tgz" + integrity sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w== + +html-webpack-plugin@^5.4.0: + version "5.5.0" + resolved "https://registry.npmmirror.com/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz" + integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== + dependencies: + "@types/html-minifier-terser" "^6.0.0" + html-minifier-terser "^6.0.2" + lodash "^4.17.21" + pretty-error "^4.0.0" + tapable "^2.0.0" + +htmlparser2@^6.1.0: + version "6.1.0" + resolved "https://registry.npmmirror.com/htmlparser2/-/htmlparser2-6.1.0.tgz" + integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.0.0" + domutils "^2.5.2" + entities "^2.0.0" + +htmlparser2@^8.0.1: + version "8.0.1" + resolved "https://registry.npmmirror.com/htmlparser2/-/htmlparser2-8.0.1.tgz" + integrity sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.2" + domutils "^3.0.1" + entities "^4.3.0" + +http-cache-semantics@4.1.1, http-cache-semantics@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" + integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== + +http-deceiver@^1.2.7: + version "1.2.7" + resolved "https://registry.npmmirror.com/http-deceiver/-/http-deceiver-1.2.7.tgz" + integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== + +http-errors@2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/http-errors/-/http-errors-2.0.0.tgz" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + inherits "2.0.4" + setprototypeof "1.2.0" + statuses "2.0.1" + toidentifier "1.0.1" + +http-errors@~1.6.2: + version "1.6.3" + resolved 
"https://registry.npmmirror.com/http-errors/-/http-errors-1.6.3.tgz" + integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.0" + statuses ">= 1.4.0 < 2" + +http-parser-js@>=0.5.1: + version "0.5.8" + resolved "https://registry.npmmirror.com/http-parser-js/-/http-parser-js-0.5.8.tgz" + integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== + +http-proxy-middleware@^2.0.3: + version "2.0.6" + resolved "https://registry.npmmirror.com/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz" + integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== + dependencies: + "@types/http-proxy" "^1.17.8" + http-proxy "^1.18.1" + is-glob "^4.0.1" + is-plain-obj "^3.0.0" + micromatch "^4.0.2" + +http-proxy@^1.18.1: + version "1.18.1" + resolved "https://registry.npmmirror.com/http-proxy/-/http-proxy-1.18.1.tgz" + integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== + dependencies: + eventemitter3 "^4.0.0" + follow-redirects "^1.0.0" + requires-port "^1.0.0" + +http2-wrapper@^1.0.0-beta.5.2: + version "1.0.3" + resolved "https://registry.npmmirror.com/http2-wrapper/-/http2-wrapper-1.0.3.tgz" + integrity sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg== + dependencies: + quick-lru "^5.1.1" + resolve-alpn "^1.0.0" + +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/human-signals/-/human-signals-2.1.0.tgz" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +iconv-lite@0.4.24: + version "0.4.24" + resolved "https://registry.npmmirror.com/iconv-lite/-/iconv-lite-0.4.24.tgz" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +icss-utils@^5.0.0, icss-utils@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/icss-utils/-/icss-utils-5.1.0.tgz" + integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== + +ignore@^5.2.0: + version "5.2.0" + resolved "https://registry.npmmirror.com/ignore/-/ignore-5.2.0.tgz" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + +immediate@^3.2.3: + version "3.3.0" + resolved "https://registry.npmmirror.com/immediate/-/immediate-3.3.0.tgz" + integrity sha512-HR7EVodfFUdQCTIeySw+WDRFJlPcLOJbXfwwZ7Oom6tjsvZ3bOkCDJHehQC3nxJrv7+f9XecwazynjU8e4Vw3Q== + +immer@^9.0.6: + version "9.0.16" + resolved "https://registry.npmmirror.com/immer/-/immer-9.0.16.tgz" + integrity sha512-qenGE7CstVm1NrHQbMh8YaSzTZTFNP3zPqr3YU0S0UY441j4bJTg4A2Hh5KAhwgaiU6ZZ1Ar6y/2f4TblnMReQ== + +import-fresh@^3.1.0, import-fresh@^3.2.1, import-fresh@^3.2.2, import-fresh@^3.3.0: + version "3.3.0" + resolved "https://registry.npmmirror.com/import-fresh/-/import-fresh-3.3.0.tgz" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-lazy@^2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/import-lazy/-/import-lazy-2.1.0.tgz" + integrity sha512-m7ZEHgtw69qOGw+jwxXkHlrlIPdTGkyh66zXZ1ajZbxkDBNjSY/LGbmjc7h0s2ELsUDTAhFr55TrPSSqJGPG0A== + 
+imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.npmmirror.com/imurmurhash/-/imurmurhash-0.1.4.tgz" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +indent-string@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/indent-string/-/indent-string-4.0.0.tgz" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +infima@0.2.0-alpha.34: + version "0.2.0-alpha.34" + resolved "https://registry.npmmirror.com/infima/-/infima-0.2.0-alpha.34.tgz" + integrity sha512-Na6A2Tl56i1p9dzu7VOAT1Kmu3f5buz63Wvd+D9ZZWL6siQ47L7wkEZUICVKFgc5gERFZVZ/PoPB57Kl++h37Q== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.npmmirror.com/inflight/-/inflight-1.0.6.tgz" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4, inherits@^2.0.0, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.npmmirror.com/inherits/-/inherits-2.0.4.tgz" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.npmmirror.com/inherits/-/inherits-2.0.3.tgz" + integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== + +ini@2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/ini/-/ini-2.0.0.tgz" + integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA== + +ini@^1.3.5, ini@~1.3.0: + version "1.3.8" + resolved "https://registry.npmmirror.com/ini/-/ini-1.3.8.tgz" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + +inline-style-parser@0.1.1: + version "0.1.1" + resolved "https://registry.npmmirror.com/inline-style-parser/-/inline-style-parser-0.1.1.tgz" + integrity sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q== + +internal-slot@^1.0.3: + version "1.0.3" + resolved "https://registry.npmmirror.com/internal-slot/-/internal-slot-1.0.3.tgz" + integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +interpret@^1.0.0: + version "1.4.0" + resolved "https://registry.npmmirror.com/interpret/-/interpret-1.4.0.tgz" + integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.npmmirror.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +ipaddr.js@^2.0.1: + version "2.0.1" + resolved "https://registry.npmmirror.com/ipaddr.js/-/ipaddr.js-2.0.1.tgz" + integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== + +is-alphabetical@1.0.4, is-alphabetical@^1.0.0: + version "1.0.4" + resolved "https://registry.npmmirror.com/is-alphabetical/-/is-alphabetical-1.0.4.tgz" + integrity sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg== + +is-alphanumerical@^1.0.0: + version "1.0.4" + resolved "https://registry.npmmirror.com/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz" + 
integrity sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A== + dependencies: + is-alphabetical "^1.0.0" + is-decimal "^1.0.0" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.npmmirror.com/is-arrayish/-/is-arrayish-0.2.1.tgz" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-bigint@^1.0.1: + version "1.0.4" + resolved "https://registry.npmmirror.com/is-bigint/-/is-bigint-1.0.4.tgz" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/is-binary-path/-/is-binary-path-2.1.0.tgz" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved "https://registry.npmmirror.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-buffer@^2.0.0: + version "2.0.5" + resolved "https://registry.npmmirror.com/is-buffer/-/is-buffer-2.0.5.tgz" + integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== + +is-callable@^1.1.4, is-callable@^1.2.7: + version "1.2.7" + resolved "https://registry.npmmirror.com/is-callable/-/is-callable-1.2.7.tgz" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== + +is-ci@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/is-ci/-/is-ci-2.0.0.tgz" + integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== + dependencies: + ci-info "^2.0.0" + +is-core-module@^2.9.0: + version "2.11.0" + resolved "https://registry.npmmirror.com/is-core-module/-/is-core-module-2.11.0.tgz" + integrity sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "https://registry.npmmirror.com/is-date-object/-/is-date-object-1.0.5.tgz" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-decimal@^1.0.0: + version "1.0.4" + resolved "https://registry.npmmirror.com/is-decimal/-/is-decimal-1.0.4.tgz" + integrity sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw== + +is-docker@^2.0.0, is-docker@^2.1.1: + version "2.2.1" + resolved "https://registry.npmmirror.com/is-docker/-/is-docker-2.2.1.tgz" + integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== + +is-extendable@^0.1.0: + version "0.1.1" + resolved "https://registry.npmmirror.com/is-extendable/-/is-extendable-0.1.1.tgz" + integrity sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw== + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.npmmirror.com/is-extglob/-/is-extglob-2.1.1.tgz" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved 
"https://registry.npmmirror.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: + version "4.0.3" + resolved "https://registry.npmmirror.com/is-glob/-/is-glob-4.0.3.tgz" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-hexadecimal@^1.0.0: + version "1.0.4" + resolved "https://registry.npmmirror.com/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz" + integrity sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw== + +is-installed-globally@^0.4.0: + version "0.4.0" + resolved "https://registry.npmmirror.com/is-installed-globally/-/is-installed-globally-0.4.0.tgz" + integrity sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ== + dependencies: + global-dirs "^3.0.0" + is-path-inside "^3.0.2" + +is-negative-zero@^2.0.2: + version "2.0.2" + resolved "https://registry.npmmirror.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + +is-npm@^5.0.0: + version "5.0.0" + resolved "https://registry.npmmirror.com/is-npm/-/is-npm-5.0.0.tgz" + integrity sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA== + +is-number-object@^1.0.4: + version "1.0.7" + resolved "https://registry.npmmirror.com/is-number-object/-/is-number-object-1.0.7.tgz" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.npmmirror.com/is-number/-/is-number-7.0.0.tgz" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-obj@^1.0.1: + version "1.0.1" + resolved "https://registry.npmmirror.com/is-obj/-/is-obj-1.0.1.tgz" + integrity sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg== + +is-obj@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/is-obj/-/is-obj-2.0.0.tgz" + integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== + +is-path-cwd@^2.2.0: + version "2.2.0" + resolved "https://registry.npmmirror.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz" + integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== + +is-path-inside@^3.0.2: + version "3.0.3" + resolved "https://registry.npmmirror.com/is-path-inside/-/is-path-inside-3.0.3.tgz" + integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== + +is-plain-obj@^2.0.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz" + integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== + +is-plain-obj@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/is-plain-obj/-/is-plain-obj-3.0.0.tgz" + integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== + +is-plain-object@^2.0.4: + version "2.0.4" + resolved "https://registry.npmmirror.com/is-plain-object/-/is-plain-object-2.0.4.tgz" + integrity 
sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + dependencies: + isobject "^3.0.1" + +is-regex@^1.1.4: + version "1.1.4" + resolved "https://registry.npmmirror.com/is-regex/-/is-regex-1.1.4.tgz" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/is-regexp/-/is-regexp-1.0.0.tgz" + integrity sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA== + +is-root@^2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/is-root/-/is-root-2.1.0.tgz" + integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== + +is-shared-array-buffer@^1.0.2: + version "1.0.2" + resolved "https://registry.npmmirror.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz" + integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== + dependencies: + call-bind "^1.0.2" + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.npmmirror.com/is-stream/-/is-stream-2.0.1.tgz" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "https://registry.npmmirror.com/is-string/-/is-string-1.0.7.tgz" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "https://registry.npmmirror.com/is-symbol/-/is-symbol-1.0.4.tgz" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-typedarray@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/is-typedarray/-/is-typedarray-1.0.0.tgz" + integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== + +is-weakref@^1.0.2: + version "1.0.2" + resolved "https://registry.npmmirror.com/is-weakref/-/is-weakref-1.0.2.tgz" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + dependencies: + call-bind "^1.0.2" + +is-whitespace-character@^1.0.0: + version "1.0.4" + resolved "https://registry.npmmirror.com/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz" + integrity sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w== + +is-word-character@^1.0.0: + version "1.0.4" + resolved "https://registry.npmmirror.com/is-word-character/-/is-word-character-1.0.4.tgz" + integrity sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA== + +is-wsl@^2.1.1, is-wsl@^2.2.0: + version "2.2.0" + resolved "https://registry.npmmirror.com/is-wsl/-/is-wsl-2.2.0.tgz" + integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== + dependencies: + is-docker "^2.0.0" + +is-yarn-global@^0.3.0: + version "0.3.0" + resolved "https://registry.npmmirror.com/is-yarn-global/-/is-yarn-global-0.3.0.tgz" + integrity sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw== + +isarray@0.0.1: + version "0.0.1" + resolved 
"https://registry.npmmirror.com/isarray/-/isarray-0.0.1.tgz" + integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== + +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/isarray/-/isarray-1.0.0.tgz" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/isexe/-/isexe-2.0.0.tgz" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.npmmirror.com/isobject/-/isobject-3.0.1.tgz" + integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== + +jest-worker@^27.0.2, jest-worker@^27.4.5: + version "27.5.1" + resolved "https://registry.npmmirror.com/jest-worker/-/jest-worker-27.5.1.tgz" + integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +joi@^17.4.2, joi@^17.6.0: + version "17.7.0" + resolved "https://registry.npmmirror.com/joi/-/joi-17.7.0.tgz" + integrity sha512-1/ugc8djfn93rTE3WRKdCzGGt/EtiYKxITMO4Wiv6q5JL1gl9ePt4kBsl1S499nbosspfctIQTpYIhSmHA3WAg== + dependencies: + "@hapi/hoek" "^9.0.0" + "@hapi/topo" "^5.0.0" + "@sideway/address" "^4.1.3" + "@sideway/formula" "^3.0.0" + "@sideway/pinpoint" "^2.0.0" + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/js-tokens/-/js-tokens-4.0.0.tgz" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.npmmirror.com/js-yaml/-/js-yaml-3.14.1.tgz" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^4.0.0, js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.npmmirror.com/js-yaml/-/js-yaml-4.1.0.tgz" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.npmmirror.com/jsesc/-/jsesc-2.5.2.tgz" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +jsesc@~0.5.0: + version "0.5.0" + resolved "https://registry.npmmirror.com/jsesc/-/jsesc-0.5.0.tgz" + integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== + +json-buffer@3.0.1: + version "3.0.1" + resolved "https://registry.npmmirror.com/json-buffer/-/json-buffer-3.0.1.tgz" + integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== + +json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: + version "2.3.1" + resolved "https://registry.npmmirror.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.npmmirror.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" + integrity 
sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + +json5@^2.1.2, json5@^2.2.1: + version "2.2.3" + resolved "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz" + integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== + +jsonfile@^6.0.1: + version "6.1.0" + resolved "https://registry.npmmirror.com/jsonfile/-/jsonfile-6.1.0.tgz" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +katex@^0.12.0: + version "0.12.0" + resolved "https://registry.npmmirror.com/katex/-/katex-0.12.0.tgz" + integrity sha512-y+8btoc/CK70XqcHqjxiGWBOeIL8upbS0peTPXTvgrh21n1RiWWcIpSWM+4uXq+IAgNh9YYQWdc7LVDPDAEEAg== + dependencies: + commander "^2.19.0" + +keyv@*, keyv@^4.0.0: + version "4.5.0" + resolved "https://registry.npmmirror.com/keyv/-/keyv-4.5.0.tgz" + integrity sha512-2YvuMsA+jnFGtBareKqgANOEKe1mk3HKiXu2fRmAfyxG0MJAywNhi5ttWA3PMjl4NmpyjZNbFifR2vNjW1znfA== + dependencies: + json-buffer "3.0.1" + +kind-of@^6.0.0, kind-of@^6.0.2: + version "6.0.3" + resolved "https://registry.npmmirror.com/kind-of/-/kind-of-6.0.3.tgz" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +klaw-sync@^6.0.0: + version "6.0.0" + resolved "https://registry.npmmirror.com/klaw-sync/-/klaw-sync-6.0.0.tgz" + integrity sha512-nIeuVSzdCCs6TDPTqI8w1Yre34sSq7AkZ4B3sfOBbI2CgVSB4Du4aLQijFU2+lhAFCwt9+42Hel6lQNIv6AntQ== + dependencies: + graceful-fs "^4.1.11" + +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.npmmirror.com/kleur/-/kleur-3.0.3.tgz" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +klona@^2.0.5: + version "2.0.5" + resolved "https://registry.npmmirror.com/klona/-/klona-2.0.5.tgz" + integrity sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ== + +latest-version@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/latest-version/-/latest-version-5.1.0.tgz" + integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA== + dependencies: + package-json "^6.3.0" + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.npmmirror.com/leven/-/leven-3.1.0.tgz" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +lilconfig@^2.0.3: + version "2.0.6" + resolved "https://registry.npmmirror.com/lilconfig/-/lilconfig-2.0.6.tgz" + integrity sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg== + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.npmmirror.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +loader-runner@^4.2.0: + version "4.3.0" + resolved "https://registry.npmmirror.com/loader-runner/-/loader-runner-4.3.0.tgz" + integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== + +loader-utils@2.0.4, loader-utils@^2.0.0: + version 
"2.0.4" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.4.tgz#8b5cb38b5c34a9a018ee1fc0e6a066d1dfcc528c" + integrity sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^2.1.2" + +locate-path@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/locate-path/-/locate-path-3.0.0.tgz" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + dependencies: + p-locate "^3.0.0" + path-exists "^3.0.0" + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.npmmirror.com/locate-path/-/locate-path-5.0.0.tgz" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +locate-path@^6.0.0: + version "6.0.0" + resolved "https://registry.npmmirror.com/locate-path/-/locate-path-6.0.0.tgz" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + +lodash.curry@^4.0.1: + version "4.1.1" + resolved "https://registry.npmmirror.com/lodash.curry/-/lodash.curry-4.1.1.tgz" + integrity sha512-/u14pXGviLaweY5JI0IUzgzF2J6Ne8INyzAZjImcryjgkZ+ebruBxy2/JaOOkTqScddcYtakjhSaeemV8lR0tA== + +lodash.debounce@^4.0.8: + version "4.0.8" + resolved "https://registry.npmmirror.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz" + integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== + +lodash.flow@^3.3.0: + version "3.5.0" + resolved "https://registry.npmmirror.com/lodash.flow/-/lodash.flow-3.5.0.tgz" + integrity sha512-ff3BX/tSioo+XojX4MOsOMhJw0nZoUEF011LX8g8d3gvjVbxd89cCio4BCXronjxcTUIJUoqKEUA+n4CqvvRPw== + +lodash.memoize@^4.1.2: + version "4.1.2" + resolved "https://registry.npmmirror.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz" + integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== + +lodash.uniq@4.5.0, lodash.uniq@^4.5.0: + version "4.5.0" + resolved "https://registry.npmmirror.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz" + integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== + +lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21: + version "4.17.21" + resolved "https://registry.npmmirror.com/lodash/-/lodash-4.17.21.tgz" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1, loose-envify@^1.4.0: + version "1.4.0" + resolved "https://registry.npmmirror.com/loose-envify/-/loose-envify-1.4.0.tgz" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lower-case@^2.0.2: + version "2.0.2" + resolved "https://registry.npmmirror.com/lower-case/-/lower-case-2.0.2.tgz" + integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== + dependencies: + tslib "^2.0.3" + +lowercase-keys@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz" + integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.npmmirror.com/lru-cache/-/lru-cache-6.0.0.tgz" + 
integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +lunr-languages@^1.4.0: + version "1.10.0" + resolved "https://registry.npmmirror.com/lunr-languages/-/lunr-languages-1.10.0.tgz" + integrity sha512-BBjKKcwrieJlzwwc9M5H/MRXGJ2qyOSDx/NXYiwkuKjiLOOoouh0WsDzeqcLoUWcX31y7i8sb8IgsZKObdUCkw== + +lunr@^2.3.9: + version "2.3.9" + resolved "https://registry.npmmirror.com/lunr/-/lunr-2.3.9.tgz" + integrity sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow== + +magic-string@^0.25.3: + version "0.25.9" + resolved "https://registry.npmmirror.com/magic-string/-/magic-string-0.25.9.tgz" + integrity sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ== + dependencies: + sourcemap-codec "^1.4.8" + +make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: + version "3.1.0" + resolved "https://registry.npmmirror.com/make-dir/-/make-dir-3.1.0.tgz" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +mark.js@^8.11.1: + version "8.11.1" + resolved "https://registry.npmmirror.com/mark.js/-/mark.js-8.11.1.tgz" + integrity sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ== + +markdown-escapes@^1.0.0: + version "1.0.4" + resolved "https://registry.npmmirror.com/markdown-escapes/-/markdown-escapes-1.0.4.tgz" + integrity sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg== + +mdast-squeeze-paragraphs@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/mdast-squeeze-paragraphs/-/mdast-squeeze-paragraphs-4.0.0.tgz" + integrity sha512-zxdPn69hkQ1rm4J+2Cs2j6wDEv7O17TfXTJ33tl/+JPIoEmtV9t2ZzBM5LPHE8QlHsmVD8t3vPKCyY3oH+H8MQ== + dependencies: + unist-util-remove "^2.0.0" + +mdast-util-definitions@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz" + integrity sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ== + dependencies: + unist-util-visit "^2.0.0" + +mdast-util-to-hast@10.0.1: + version "10.0.1" + resolved "https://registry.npmmirror.com/mdast-util-to-hast/-/mdast-util-to-hast-10.0.1.tgz" + integrity sha512-BW3LM9SEMnjf4HXXVApZMt8gLQWVNXc3jryK0nJu/rOXPOnlkUjmdkDlmxMirpbU9ILncGFIwLH/ubnWBbcdgA== + dependencies: + "@types/mdast" "^3.0.0" + "@types/unist" "^2.0.0" + mdast-util-definitions "^4.0.0" + mdurl "^1.0.0" + unist-builder "^2.0.0" + unist-util-generated "^1.0.0" + unist-util-position "^3.0.0" + unist-util-visit "^2.0.0" + +mdast-util-to-string@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz" + integrity sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w== + +mdn-data@2.0.14: + version "2.0.14" + resolved "https://registry.npmmirror.com/mdn-data/-/mdn-data-2.0.14.tgz" + integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== + +mdn-data@2.0.4: + version "2.0.4" + resolved "https://registry.npmmirror.com/mdn-data/-/mdn-data-2.0.4.tgz" + integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== + +mdurl@^1.0.0: + version "1.0.1" + resolved "https://registry.npmmirror.com/mdurl/-/mdurl-1.0.1.tgz" + integrity 
sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g== + +media-typer@0.3.0: + version "0.3.0" + resolved "https://registry.npmmirror.com/media-typer/-/media-typer-0.3.0.tgz" + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== + +memfs@^3.1.2, memfs@^3.4.3: + version "3.4.10" + resolved "https://registry.npmmirror.com/memfs/-/memfs-3.4.10.tgz" + integrity sha512-0bCUP+L79P4am30yP1msPzApwuMQG23TjwlwdHeEV5MxioDR1a0AgB0T9FfggU52eJuDCq8WVwb5ekznFyWiTQ== + dependencies: + fs-monkey "^1.0.3" + +merge-descriptors@1.0.1: + version "1.0.1" + resolved "https://registry.npmmirror.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz" + integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== + +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/merge-stream/-/merge-stream-2.0.0.tgz" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.npmmirror.com/merge2/-/merge2-1.4.1.tgz" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +methods@~1.1.2: + version "1.1.2" + resolved "https://registry.npmmirror.com/methods/-/methods-1.1.2.tgz" + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== + +micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: + version "4.0.5" + resolved "https://registry.npmmirror.com/micromatch/-/micromatch-4.0.5.tgz" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": + version "1.52.0" + resolved "https://registry.npmmirror.com/mime-db/-/mime-db-1.52.0.tgz" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-db@~1.33.0: + version "1.33.0" + resolved "https://registry.npmmirror.com/mime-db/-/mime-db-1.33.0.tgz" + integrity sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ== + +mime-types@2.1.18, mime-types@~2.1.17: + version "2.1.18" + resolved "https://registry.npmmirror.com/mime-types/-/mime-types-2.1.18.tgz" + integrity sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ== + dependencies: + mime-db "~1.33.0" + +mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "https://registry.npmmirror.com/mime-types/-/mime-types-2.1.35.tgz" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.npmmirror.com/mime/-/mime-1.6.0.tgz" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/mimic-fn/-/mimic-fn-2.1.0.tgz" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +mimic-response@^1.0.0: + version "1.0.1" + resolved "https://registry.npmmirror.com/mimic-response/-/mimic-response-1.0.1.tgz" + integrity 
sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== + +mimic-response@^3.1.0: + version "3.1.0" + resolved "https://registry.npmmirror.com/mimic-response/-/mimic-response-3.1.0.tgz" + integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== + +mini-css-extract-plugin@^1.6.0: + version "1.6.2" + resolved "https://registry.npmmirror.com/mini-css-extract-plugin/-/mini-css-extract-plugin-1.6.2.tgz" + integrity sha512-WhDvO3SjGm40oV5y26GjMJYjd2UMqrLAGKy5YS2/3QKJy2F7jgynuHTir/tgUUOiNQu5saXHdc8reo7YuhhT4Q== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + webpack-sources "^1.1.0" + +minimalistic-assert@^1.0.0: + version "1.0.1" + resolved "https://registry.npmmirror.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimatch@3.0.5, minimatch@3.1.2, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1: + version "3.0.5" + resolved "https://registry.npmmirror.com/minimatch/-/minimatch-3.0.5.tgz" + integrity sha512-tUpxzX0VAzJHjLu0xUfFv1gwVp9ba3IOuRAVH2EGuRW8a5emA2FlACLqiT/lDVtS1W+TGNwqz3sWaNyLgDJWuw== + dependencies: + brace-expansion "^1.1.7" + +minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: + version "1.2.7" + resolved "https://registry.npmmirror.com/minimist/-/minimist-1.2.7.tgz" + integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== + +mkdirp@~0.5.1: + version "0.5.6" + resolved "https://registry.npmmirror.com/mkdirp/-/mkdirp-0.5.6.tgz" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist "^1.2.6" + +mrmime@^1.0.0: + version "1.0.1" + resolved "https://registry.npmmirror.com/mrmime/-/mrmime-1.0.1.tgz" + integrity sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/ms/-/ms-2.0.0.tgz" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.npmmirror.com/ms/-/ms-2.1.2.tgz" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@2.1.3: + version "2.1.3" + resolved "https://registry.npmmirror.com/ms/-/ms-2.1.3.tgz" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +multicast-dns@^7.2.5: + version "7.2.5" + resolved "https://registry.npmmirror.com/multicast-dns/-/multicast-dns-7.2.5.tgz" + integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg== + dependencies: + dns-packet "^5.2.2" + thunky "^1.0.2" + +nanoid@^3.3.6: + version "3.3.6" + resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz" + integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA== + +negotiator@0.6.3: + version "0.6.3" + resolved "https://registry.npmmirror.com/negotiator/-/negotiator-0.6.3.tgz" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + +neo-async@^2.6.2: + version "2.6.2" + resolved "https://registry.npmmirror.com/neo-async/-/neo-async-2.6.2.tgz" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + 
+no-case@^3.0.4: + version "3.0.4" + resolved "https://registry.npmmirror.com/no-case/-/no-case-3.0.4.tgz" + integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== + dependencies: + lower-case "^2.0.2" + tslib "^2.0.3" + +node-emoji@^1.10.0: + version "1.11.0" + resolved "https://registry.npmmirror.com/node-emoji/-/node-emoji-1.11.0.tgz" + integrity sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A== + dependencies: + lodash "^4.17.21" + +node-fetch@2.6.7: + version "2.6.7" + resolved "https://registry.npmmirror.com/node-fetch/-/node-fetch-2.6.7.tgz" + integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== + dependencies: + whatwg-url "^5.0.0" + +node-forge@1.3.0, node-forge@^1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.0.tgz#37a874ea723855f37db091e6c186e5b67a01d4b2" + integrity sha512-08ARB91bUi6zNKzVmaj3QO7cr397uiDT2nJ63cHjyNtCTWIgvS47j3eT0WfzUwS9+6Z5YshRaoasFkXCKrIYbA== + +node-releases@^2.0.18: + version "2.0.18" + resolved "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz" + integrity sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g== + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/normalize-path/-/normalize-path-3.0.0.tgz" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-range@^0.1.2: + version "0.1.2" + resolved "https://registry.npmmirror.com/normalize-range/-/normalize-range-0.1.2.tgz" + integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== + +normalize-url@^6.0.1: + version "6.1.0" + resolved "https://registry.npmmirror.com/normalize-url/-/normalize-url-6.1.0.tgz" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== + +npm-run-path@^4.0.1: + version "4.0.1" + resolved "https://registry.npmmirror.com/npm-run-path/-/npm-run-path-4.0.1.tgz" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +nprogress@^0.2.0: + version "0.2.0" + resolved "https://registry.npmmirror.com/nprogress/-/nprogress-0.2.0.tgz" + integrity sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA== + +nth-check@2.0.1, nth-check@^1.0.2, nth-check@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.0.1.tgz#2efe162f5c3da06a28959fbd3db75dbeea9f0fc2" + integrity sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w== + dependencies: + boolbase "^1.0.0" + +object-assign@^4.1.0, object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.npmmirror.com/object-assign/-/object-assign-4.1.1.tgz" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-inspect@^1.12.2, object-inspect@^1.9.0: + version "1.12.2" + resolved "https://registry.npmmirror.com/object-inspect/-/object-inspect-1.12.2.tgz" + integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== + +object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.npmmirror.com/object-keys/-/object-keys-1.1.1.tgz" + integrity 
sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.0, object.assign@^4.1.4: + version "4.1.4" + resolved "https://registry.npmmirror.com/object.assign/-/object.assign-4.1.4.tgz" + integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + has-symbols "^1.0.3" + object-keys "^1.1.1" + +object.getownpropertydescriptors@^2.1.0: + version "2.1.4" + resolved "https://registry.npmmirror.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.4.tgz" + integrity sha512-sccv3L/pMModT6dJAYF3fzGMVcb38ysQ0tEE6ixv2yXJDtEIPph268OlAdJj5/qZMZDq2g/jqvwppt36uS/uQQ== + dependencies: + array.prototype.reduce "^1.0.4" + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.1" + +object.values@^1.1.0: + version "1.1.5" + resolved "https://registry.npmmirror.com/object.values/-/object.values-1.1.5.tgz" + integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +obuf@^1.0.0, obuf@^1.1.2: + version "1.1.2" + resolved "https://registry.npmmirror.com/obuf/-/obuf-1.1.2.tgz" + integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== + +on-finished@2.4.1: + version "2.4.1" + resolved "https://registry.npmmirror.com/on-finished/-/on-finished-2.4.1.tgz" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== + dependencies: + ee-first "1.1.1" + +on-headers@~1.0.2: + version "1.0.2" + resolved "https://registry.npmmirror.com/on-headers/-/on-headers-1.0.2.tgz" + integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== + +once@^1.3.0, once@^1.3.1, once@^1.4.0: + version "1.4.0" + resolved "https://registry.npmmirror.com/once/-/once-1.4.0.tgz" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +onetime@^5.1.2: + version "5.1.2" + resolved "https://registry.npmmirror.com/onetime/-/onetime-5.1.2.tgz" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +open@^7.0.2: + version "7.4.2" + resolved "https://registry.npmmirror.com/open/-/open-7.4.2.tgz" + integrity sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q== + dependencies: + is-docker "^2.0.0" + is-wsl "^2.1.1" + +open@^8.0.9: + version "8.4.0" + resolved "https://registry.npmmirror.com/open/-/open-8.4.0.tgz" + integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + +opener@^1.5.2: + version "1.5.2" + resolved "https://registry.npmmirror.com/opener/-/opener-1.5.2.tgz" + integrity sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A== + +p-cancelable@^2.0.0: + version "2.1.1" + resolved "https://registry.npmmirror.com/p-cancelable/-/p-cancelable-2.1.1.tgz" + integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg== + +p-limit@^2.0.0, p-limit@^2.2.0: + version "2.3.0" + resolved 
"https://registry.npmmirror.com/p-limit/-/p-limit-2.3.0.tgz" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-limit@^3.0.2: + version "3.1.0" + resolved "https://registry.npmmirror.com/p-limit/-/p-limit-3.1.0.tgz" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + +p-locate@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/p-locate/-/p-locate-3.0.0.tgz" + integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + dependencies: + p-limit "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.npmmirror.com/p-locate/-/p-locate-4.1.0.tgz" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-locate@^5.0.0: + version "5.0.0" + resolved "https://registry.npmmirror.com/p-locate/-/p-locate-5.0.0.tgz" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + +p-map@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/p-map/-/p-map-4.0.0.tgz" + integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== + dependencies: + aggregate-error "^3.0.0" + +p-retry@^4.5.0: + version "4.6.2" + resolved "https://registry.npmmirror.com/p-retry/-/p-retry-4.6.2.tgz" + integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== + dependencies: + "@types/retry" "0.12.0" + retry "^0.13.1" + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.npmmirror.com/p-try/-/p-try-2.2.0.tgz" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +package-json@^6.3.0: + version "6.5.0" + resolved "https://registry.npmmirror.com/package-json/-/package-json-6.5.0.tgz" + integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ== + dependencies: + got "^9.6.0" + registry-auth-token "^4.0.0" + registry-url "^5.0.0" + semver "^6.2.0" + +param-case@^3.0.4: + version "3.0.4" + resolved "https://registry.npmmirror.com/param-case/-/param-case-3.0.4.tgz" + integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.npmmirror.com/parent-module/-/parent-module-1.0.1.tgz" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-entities@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/parse-entities/-/parse-entities-2.0.0.tgz" + integrity sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ== + dependencies: + character-entities "^1.0.0" + character-entities-legacy "^1.0.0" + character-reference-invalid "^1.0.0" + is-alphanumerical "^1.0.0" + is-decimal "^1.0.0" + is-hexadecimal "^1.0.0" + +parse-json@^5.0.0: + version "5.2.0" + resolved "https://registry.npmmirror.com/parse-json/-/parse-json-5.2.0.tgz" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" 
"^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +parse-numeric-range@^1.3.0: + version "1.3.0" + resolved "https://registry.npmmirror.com/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz" + integrity sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ== + +parse5-htmlparser2-tree-adapter@^7.0.0: + version "7.0.0" + resolved "https://registry.npmmirror.com/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz" + integrity sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g== + dependencies: + domhandler "^5.0.2" + parse5 "^7.0.0" + +parse5@^5.0.0: + version "5.1.1" + resolved "https://registry.npmmirror.com/parse5/-/parse5-5.1.1.tgz" + integrity sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug== + +parse5@^6.0.0: + version "6.0.1" + resolved "https://registry.npmmirror.com/parse5/-/parse5-6.0.1.tgz" + integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + +parse5@^7.0.0: + version "7.1.1" + resolved "https://registry.npmmirror.com/parse5/-/parse5-7.1.1.tgz" + integrity sha512-kwpuwzB+px5WUg9pyK0IcK/shltJN5/OVhQagxhCQNtT9Y9QRZqNY2e1cmbu/paRh5LMnz/oVTVLBpjFmMZhSg== + dependencies: + entities "^4.4.0" + +parseurl@~1.3.2, parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.npmmirror.com/parseurl/-/parseurl-1.3.3.tgz" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +pascal-case@^3.1.2: + version "3.1.2" + resolved "https://registry.npmmirror.com/pascal-case/-/pascal-case-3.1.2.tgz" + integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/path-exists/-/path-exists-3.0.0.tgz" + integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/path-exists/-/path-exists-4.0.0.tgz" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.npmmirror.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-is-inside@1.0.2: + version "1.0.2" + resolved "https://registry.npmmirror.com/path-is-inside/-/path-is-inside-1.0.2.tgz" + integrity sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w== + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.npmmirror.com/path-key/-/path-key-3.1.1.tgz" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.npmmirror.com/path-parse/-/path-parse-1.0.7.tgz" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-to-regexp@0.1.7: + version "0.1.7" + resolved "https://registry.npmmirror.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz" + integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== + 
+path-to-regexp@2.2.1: + version "2.2.1" + resolved "https://registry.npmmirror.com/path-to-regexp/-/path-to-regexp-2.2.1.tgz" + integrity sha512-gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ== + +path-to-regexp@^1.7.0: + version "1.8.0" + resolved "https://registry.npmmirror.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz" + integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== + dependencies: + isarray "0.0.1" + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/path-type/-/path-type-4.0.0.tgz" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +picocolors@^1.0.0, picocolors@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz" + integrity sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.npmmirror.com/picomatch/-/picomatch-2.3.1.tgz" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pify@^4.0.1: + version "4.0.1" + resolved "https://registry.npmmirror.com/pify/-/pify-4.0.1.tgz" + integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== + +pkg-dir@^4.1.0: + version "4.2.0" + resolved "https://registry.npmmirror.com/pkg-dir/-/pkg-dir-4.2.0.tgz" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +pkg-up@^3.1.0: + version "3.1.0" + resolved "https://registry.npmmirror.com/pkg-up/-/pkg-up-3.1.0.tgz" + integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== + dependencies: + find-up "^3.0.0" + +postcss-calc@^8.2.3: + version "8.2.4" + resolved "https://registry.npmmirror.com/postcss-calc/-/postcss-calc-8.2.4.tgz" + integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== + dependencies: + postcss-selector-parser "^6.0.9" + postcss-value-parser "^4.2.0" + +postcss-colormin@^5.3.0: + version "5.3.0" + resolved "https://registry.npmmirror.com/postcss-colormin/-/postcss-colormin-5.3.0.tgz" + integrity sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + colord "^2.9.1" + postcss-value-parser "^4.2.0" + +postcss-convert-values@^5.1.3: + version "5.1.3" + resolved "https://registry.npmmirror.com/postcss-convert-values/-/postcss-convert-values-5.1.3.tgz" + integrity sha512-82pC1xkJZtcJEfiLw6UXnXVXScgtBrjlO5CBmuDQc+dlb88ZYheFsjTn40+zBVi3DkfF7iezO0nJUPLcJK3pvA== + dependencies: + browserslist "^4.21.4" + postcss-value-parser "^4.2.0" + +postcss-discard-comments@^5.1.2: + version "5.1.2" + resolved "https://registry.npmmirror.com/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz" + integrity sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ== + +postcss-discard-duplicates@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz" + integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== + +postcss-discard-empty@^5.1.1: + version "5.1.1" + resolved 
"https://registry.npmmirror.com/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz" + integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== + +postcss-discard-overridden@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz" + integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== + +postcss-discard-unused@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/postcss-discard-unused/-/postcss-discard-unused-5.1.0.tgz" + integrity sha512-KwLWymI9hbwXmJa0dkrzpRbSJEh0vVUd7r8t0yOGPcfKzyJJxFM8kLyC5Ev9avji6nY95pOp1W6HqIrfT+0VGw== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-loader@^6.1.1: + version "6.2.1" + resolved "https://registry.npmmirror.com/postcss-loader/-/postcss-loader-6.2.1.tgz" + integrity sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q== + dependencies: + cosmiconfig "^7.0.0" + klona "^2.0.5" + semver "^7.3.5" + +postcss-merge-idents@^5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/postcss-merge-idents/-/postcss-merge-idents-5.1.1.tgz" + integrity sha512-pCijL1TREiCoog5nQp7wUe+TUonA2tC2sQ54UGeMmryK3UFGIYKqDyjnqd6RcuI4znFn9hWSLNN8xKE/vWcUQw== + dependencies: + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-merge-longhand@^5.1.7: + version "5.1.7" + resolved "https://registry.npmmirror.com/postcss-merge-longhand/-/postcss-merge-longhand-5.1.7.tgz" + integrity sha512-YCI9gZB+PLNskrK0BB3/2OzPnGhPkBEwmwhfYk1ilBHYVAZB7/tkTHFBAnCrvBBOmeYyMYw3DMjT55SyxMBzjQ== + dependencies: + postcss-value-parser "^4.2.0" + stylehacks "^5.1.1" + +postcss-merge-rules@^5.1.3: + version "5.1.3" + resolved "https://registry.npmmirror.com/postcss-merge-rules/-/postcss-merge-rules-5.1.3.tgz" + integrity sha512-LbLd7uFC00vpOuMvyZop8+vvhnfRGpp2S+IMQKeuOZZapPRY4SMq5ErjQeHbHsjCUgJkRNrlU+LmxsKIqPKQlA== + dependencies: + browserslist "^4.21.4" + caniuse-api "^3.0.0" + cssnano-utils "^3.1.0" + postcss-selector-parser "^6.0.5" + +postcss-minify-font-values@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz" + integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-minify-gradients@^5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz" + integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== + dependencies: + colord "^2.9.1" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-minify-params@^5.1.4: + version "5.1.4" + resolved "https://registry.npmmirror.com/postcss-minify-params/-/postcss-minify-params-5.1.4.tgz" + integrity sha512-+mePA3MgdmVmv6g+30rn57USjOGSAyuxUmkfiWpzalZ8aiBkdPYjXWtHuwJGm1v5Ojy0Z0LaSYhHaLJQB0P8Jw== + dependencies: + browserslist "^4.21.4" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-minify-selectors@^5.2.1: + version "5.2.1" + resolved "https://registry.npmmirror.com/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz" + integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-modules-extract-imports@^3.0.0: + version "3.0.0" + 
resolved "https://registry.npmmirror.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz" + integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== + +postcss-modules-local-by-default@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz" + integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== + dependencies: + icss-utils "^5.0.0" + postcss-selector-parser "^6.0.2" + postcss-value-parser "^4.1.0" + +postcss-modules-scope@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz" + integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-modules-values@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz" + integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== + dependencies: + icss-utils "^5.0.0" + +postcss-normalize-charset@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz" + integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== + +postcss-normalize-display-values@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz" + integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-positions@^5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz" + integrity sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-repeat-style@^5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz" + integrity sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-string@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz" + integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-timing-functions@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz" + integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-unicode@^5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.1.tgz" + integrity sha512-qnCL5jzkNUmKVhZoENp1mJiGNPcsJCs1aaRmURmeJGES23Z/ajaln+EPTD+rBeNkSryI+2WTdW+lwcVdOikrpA== + dependencies: + browserslist "^4.21.4" + postcss-value-parser "^4.2.0" + +postcss-normalize-url@^5.1.0: + version 
"5.1.0" + resolved "https://registry.npmmirror.com/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz" + integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== + dependencies: + normalize-url "^6.0.1" + postcss-value-parser "^4.2.0" + +postcss-normalize-whitespace@^5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz" + integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-ordered-values@^5.1.3: + version "5.1.3" + resolved "https://registry.npmmirror.com/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz" + integrity sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ== + dependencies: + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-reduce-idents@^5.2.0: + version "5.2.0" + resolved "https://registry.npmmirror.com/postcss-reduce-idents/-/postcss-reduce-idents-5.2.0.tgz" + integrity sha512-BTrLjICoSB6gxbc58D5mdBK8OhXRDqud/zodYfdSi52qvDHdMwk+9kB9xsM8yJThH/sZU5A6QVSmMmaN001gIg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-reduce-initial@^5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/postcss-reduce-initial/-/postcss-reduce-initial-5.1.1.tgz" + integrity sha512-//jeDqWcHPuXGZLoolFrUXBDyuEGbr9S2rMo19bkTIjBQ4PqkaO+oI8wua5BOUxpfi97i3PCoInsiFIEBfkm9w== + dependencies: + browserslist "^4.21.4" + caniuse-api "^3.0.0" + +postcss-reduce-transforms@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz" + integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.9: + version "6.0.10" + resolved "https://registry.npmmirror.com/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz" + integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-sort-media-queries@^4.1.0: + version "4.3.0" + resolved "https://registry.npmmirror.com/postcss-sort-media-queries/-/postcss-sort-media-queries-4.3.0.tgz" + integrity sha512-jAl8gJM2DvuIJiI9sL1CuiHtKM4s5aEIomkU8G3LFvbP+p8i7Sz8VV63uieTgoewGqKbi+hxBTiOKJlB35upCg== + dependencies: + sort-css-media-queries "2.1.0" + +postcss-svgo@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/postcss-svgo/-/postcss-svgo-5.1.0.tgz" + integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== + dependencies: + postcss-value-parser "^4.2.0" + svgo "^2.7.0" + +postcss-unique-selectors@^5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz" + integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: + version "4.2.0" + resolved "https://registry.npmmirror.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== + 
+postcss-zindex@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/postcss-zindex/-/postcss-zindex-5.1.0.tgz" + integrity sha512-fgFMf0OtVSBR1va1JNHYgMxYk73yhn/qb4uQDq1DLGYolz8gHCyr/sesEuGUaYs58E3ZJRcpoGuPVoB7Meiq9A== + +postcss@^8.2.15, postcss@^8.3.11, postcss@^8.3.5, postcss@^8.3.7: + version "8.4.31" + resolved "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz" + integrity sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ== + dependencies: + nanoid "^3.3.6" + picocolors "^1.0.0" + source-map-js "^1.0.2" + +pretty-error@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/pretty-error/-/pretty-error-4.0.0.tgz" + integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== + dependencies: + lodash "^4.17.20" + renderkid "^3.0.0" + +pretty-time@^1.1.0: + version "1.1.0" + resolved "https://registry.npmmirror.com/pretty-time/-/pretty-time-1.1.0.tgz" + integrity sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA== + +prism-react-renderer@^1.2.1: + version "1.3.5" + resolved "https://registry.npmmirror.com/prism-react-renderer/-/prism-react-renderer-1.3.5.tgz" + integrity sha512-IJ+MSwBWKG+SM3b2SUfdrhC+gu01QkV2KmRQgREThBfSQRoufqRfxfHUxpG1WcaFjP+kojcFyO9Qqtpgt3qLCg== + +prismjs@^1.23.0: + version "1.29.0" + resolved "https://registry.npmmirror.com/prismjs/-/prismjs-1.29.0.tgz" + integrity sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q== + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.npmmirror.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +promise@^7.1.1: + version "7.3.1" + resolved "https://registry.npmmirror.com/promise/-/promise-7.3.1.tgz" + integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== + dependencies: + asap "~2.0.3" + +prompts@^2.4.0, prompts@^2.4.1: + version "2.4.2" + resolved "https://registry.npmmirror.com/prompts/-/prompts-2.4.2.tgz" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +prop-types@^15.6.2, prop-types@^15.7.2: + version "15.8.1" + resolved "https://registry.npmmirror.com/prop-types/-/prop-types-15.8.1.tgz" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +property-information@^5.0.0, property-information@^5.3.0: + version "5.6.0" + resolved "https://registry.npmmirror.com/property-information/-/property-information-5.6.0.tgz" + integrity sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA== + dependencies: + xtend "^4.0.0" + +proxy-addr@~2.0.7: + version "2.0.7" + resolved "https://registry.npmmirror.com/proxy-addr/-/proxy-addr-2.0.7.tgz" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +pump@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/pump/-/pump-3.0.0.tgz" + integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== + dependencies: + end-of-stream "^1.1.0" 
+ once "^1.3.1" + +punycode@^1.3.2: + version "1.4.1" + resolved "https://registry.npmmirror.com/punycode/-/punycode-1.4.1.tgz" + integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== + +punycode@^2.1.0: + version "2.1.1" + resolved "https://registry.npmmirror.com/punycode/-/punycode-2.1.1.tgz" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +pupa@^2.1.1: + version "2.1.1" + resolved "https://registry.npmmirror.com/pupa/-/pupa-2.1.1.tgz" + integrity sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A== + dependencies: + escape-goat "^2.0.0" + +pure-color@^1.2.0: + version "1.3.0" + resolved "https://registry.npmmirror.com/pure-color/-/pure-color-1.3.0.tgz" + integrity sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA== + +q@^1.1.2: + version "1.5.1" + resolved "https://registry.npmmirror.com/q/-/q-1.5.1.tgz" + integrity sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw== + +qs@6.11.0: + version "6.11.0" + resolved "https://registry.npmmirror.com/qs/-/qs-6.11.0.tgz" + integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== + dependencies: + side-channel "^1.0.4" + +querystring@0.2.0: + version "0.2.0" + resolved "https://registry.npmmirror.com/querystring/-/querystring-0.2.0.tgz" + integrity sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.npmmirror.com/queue-microtask/-/queue-microtask-1.2.3.tgz" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +quick-lru@^5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/quick-lru/-/quick-lru-5.1.1.tgz" + integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== + +randombytes@^2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/randombytes/-/randombytes-2.1.0.tgz" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + safe-buffer "^5.1.0" + +range-parser@1.2.0: + version "1.2.0" + resolved "https://registry.npmmirror.com/range-parser/-/range-parser-1.2.0.tgz" + integrity sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A== + +range-parser@^1.2.1, range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.npmmirror.com/range-parser/-/range-parser-1.2.1.tgz" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.5.1: + version "2.5.1" + resolved "https://registry.npmmirror.com/raw-body/-/raw-body-2.5.1.tgz" + integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + +rc@1.2.8, rc@^1.2.8: + version "1.2.8" + resolved "https://registry.npmmirror.com/rc/-/rc-1.2.8.tgz" + integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== + dependencies: + deep-extend "^0.6.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + +react-base16-styling@^0.6.0: + version "0.6.0" + resolved 
"https://registry.npmmirror.com/react-base16-styling/-/react-base16-styling-0.6.0.tgz" + integrity sha512-yvh/7CArceR/jNATXOKDlvTnPKPmGZz7zsenQ3jUwLzHkNUR0CvY3yGYJbWJ/nnxsL8Sgmt5cO3/SILVuPO6TQ== + dependencies: + base16 "^1.0.0" + lodash.curry "^4.0.1" + lodash.flow "^3.3.0" + pure-color "^1.2.0" + +react-dev-utils@12.0.0-next.47: + version "12.0.0-next.47" + resolved "https://registry.npmmirror.com/react-dev-utils/-/react-dev-utils-12.0.0-next.47.tgz" + integrity sha512-PsE71vP15TZMmp/RZKOJC4fYD5Pvt0+wCoyG3QHclto0d4FyIJI78xGRICOOThZFROqgXYlZP6ddmeybm+jO4w== + dependencies: + "@babel/code-frame" "^7.10.4" + address "^1.1.2" + browserslist "^4.16.5" + chalk "^2.4.2" + cross-spawn "^7.0.3" + detect-port-alt "^1.1.6" + escape-string-regexp "^2.0.0" + filesize "^6.1.0" + find-up "^4.1.0" + fork-ts-checker-webpack-plugin "^6.0.5" + global-modules "^2.0.0" + globby "^11.0.1" + gzip-size "^5.1.1" + immer "^9.0.6" + is-root "^2.1.0" + loader-utils "^2.0.0" + open "^7.0.2" + pkg-up "^3.1.0" + prompts "^2.4.0" + react-error-overlay "7.0.0-next.54+1465357b" + recursive-readdir "^2.2.2" + shell-quote "^1.7.2" + strip-ansi "^6.0.0" + text-table "^0.2.0" + +react-dom@^17.0.1: + version "17.0.2" + resolved "https://registry.npmmirror.com/react-dom/-/react-dom-17.0.2.tgz" + integrity sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + scheduler "^0.20.2" + +react-error-overlay@7.0.0-next.54+1465357b: + version "7.0.0-next.54" + resolved "https://registry.npmmirror.com/react-error-overlay/-/react-error-overlay-7.0.0-next.54.tgz" + integrity sha512-b96CiTnZahXPDNH9MKplvt5+jD+BkxDw7q5R3jnkUXze/ux1pLv32BBZmlj0OfCUeMqyz4sAmF+0ccJGVMlpXw== + +react-error-overlay@^6.0.9: + version "6.0.11" + resolved "https://registry.npmmirror.com/react-error-overlay/-/react-error-overlay-6.0.11.tgz" + integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== + +react-fast-compare@^3.1.1: + version "3.2.0" + resolved "https://registry.npmmirror.com/react-fast-compare/-/react-fast-compare-3.2.0.tgz" + integrity sha512-rtGImPZ0YyLrscKI9xTpV8psd6I8VAtjKCzQDlzyDvqJA8XOW78TXYQwNRNd8g8JZnDu8q9Fu/1v4HPAVwVdHA== + +react-helmet@^6.1.0: + version "6.1.0" + resolved "https://registry.npmmirror.com/react-helmet/-/react-helmet-6.1.0.tgz" + integrity sha512-4uMzEY9nlDlgxr61NL3XbKRy1hEkXmKNXhjbAIOVw5vcFrsdYbH2FEwcNyWvWinl103nXgzYNlns9ca+8kFiWw== + dependencies: + object-assign "^4.1.1" + prop-types "^15.7.2" + react-fast-compare "^3.1.1" + react-side-effect "^2.1.0" + +react-is@^16.13.1, react-is@^16.6.0, react-is@^16.7.0: + version "16.13.1" + resolved "https://registry.npmmirror.com/react-is/-/react-is-16.13.1.tgz" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-json-view@^1.21.3: + version "1.21.3" + resolved "https://registry.npmmirror.com/react-json-view/-/react-json-view-1.21.3.tgz" + integrity sha512-13p8IREj9/x/Ye4WI/JpjhoIwuzEgUAtgJZNBJckfzJt1qyh24BdTm6UQNGnyTq9dapQdrqvquZTo3dz1X6Cjw== + dependencies: + flux "^4.0.1" + react-base16-styling "^0.6.0" + react-lifecycles-compat "^3.0.4" + react-textarea-autosize "^8.3.2" + +react-lifecycles-compat@^3.0.4: + version "3.0.4" + resolved "https://registry.npmmirror.com/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz" + integrity sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA== + 
+react-loadable-ssr-addon-v5-slorber@^1.0.1: + version "1.0.1" + resolved "https://registry.npmmirror.com/react-loadable-ssr-addon-v5-slorber/-/react-loadable-ssr-addon-v5-slorber-1.0.1.tgz" + integrity sha512-lq3Lyw1lGku8zUEJPDxsNm1AfYHBrO9Y1+olAYwpUJ2IGFBskM0DMKok97A6LWUpHm+o7IvQBOWu9MLenp9Z+A== + dependencies: + "@babel/runtime" "^7.10.3" + +"react-loadable@npm:@docusaurus/react-loadable@5.5.2": + version "5.5.2" + resolved "https://registry.npmmirror.com/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz" + integrity sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ== + dependencies: + "@types/react" "*" + prop-types "^15.6.2" + +react-router-config@^5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/react-router-config/-/react-router-config-5.1.1.tgz" + integrity sha512-DuanZjaD8mQp1ppHjgnnUnyOlqYXZVjnov/JzFhjLEwd3Z4dYjMSnqrEzzGThH47vpCOqPPwJM2FtthLeJ8Pbg== + dependencies: + "@babel/runtime" "^7.1.2" + +react-router-dom@^5.2.0: + version "5.3.4" + resolved "https://registry.npmmirror.com/react-router-dom/-/react-router-dom-5.3.4.tgz" + integrity sha512-m4EqFMHv/Ih4kpcBCONHbkT68KoAeHN4p3lAGoNryfHi0dMy0kCzEZakiKRsvg5wHZ/JLrLW8o8KomWiz/qbYQ== + dependencies: + "@babel/runtime" "^7.12.13" + history "^4.9.0" + loose-envify "^1.3.1" + prop-types "^15.6.2" + react-router "5.3.4" + tiny-invariant "^1.0.2" + tiny-warning "^1.0.0" + +react-router@5.3.4, react-router@^5.2.0: + version "5.3.4" + resolved "https://registry.npmmirror.com/react-router/-/react-router-5.3.4.tgz" + integrity sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA== + dependencies: + "@babel/runtime" "^7.12.13" + history "^4.9.0" + hoist-non-react-statics "^3.1.0" + loose-envify "^1.3.1" + path-to-regexp "^1.7.0" + prop-types "^15.6.2" + react-is "^16.6.0" + tiny-invariant "^1.0.2" + tiny-warning "^1.0.0" + +react-side-effect@^2.1.0: + version "2.1.2" + resolved "https://registry.npmmirror.com/react-side-effect/-/react-side-effect-2.1.2.tgz" + integrity sha512-PVjOcvVOyIILrYoyGEpDN3vmYNLdy1CajSFNt4TDsVQC5KpTijDvWVoR+/7Rz2xT978D8/ZtFceXxzsPwZEDvw== + +react-textarea-autosize@^8.3.2: + version "8.3.4" + resolved "https://registry.npmmirror.com/react-textarea-autosize/-/react-textarea-autosize-8.3.4.tgz" + integrity sha512-CdtmP8Dc19xL8/R6sWvtknD/eCXkQr30dtvC4VmGInhRsfF8X/ihXCq6+9l9qbxmKRiq407/7z5fxE7cVWQNgQ== + dependencies: + "@babel/runtime" "^7.10.2" + use-composed-ref "^1.3.0" + use-latest "^1.2.1" + +react@^17.0.1: + version "17.0.2" + resolved "https://registry.npmmirror.com/react/-/react-17.0.2.tgz" + integrity sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + +readable-stream@^2.0.1: + version "2.3.7" + resolved "https://registry.npmmirror.com/readable-stream/-/readable-stream-2.3.7.tgz" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^3.0.6: + version "3.6.0" + resolved "https://registry.npmmirror.com/readable-stream/-/readable-stream-3.6.0.tgz" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + 
+readdirp@~3.6.0: + version "3.6.0" + resolved "https://registry.npmmirror.com/readdirp/-/readdirp-3.6.0.tgz" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +reading-time@^1.5.0: + version "1.5.0" + resolved "https://registry.npmmirror.com/reading-time/-/reading-time-1.5.0.tgz" + integrity sha512-onYyVhBNr4CmAxFsKS7bz+uTLRakypIe4R+5A824vBSkQy/hB3fZepoVEf8OVAxzLvK+H/jm9TzpI3ETSm64Kg== + +rechoir@^0.6.2: + version "0.6.2" + resolved "https://registry.npmmirror.com/rechoir/-/rechoir-0.6.2.tgz" + integrity sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw== + dependencies: + resolve "^1.1.6" + +recursive-readdir@^2.2.2: + version "2.2.3" + resolved "https://registry.npmmirror.com/recursive-readdir/-/recursive-readdir-2.2.3.tgz" + integrity sha512-8HrF5ZsXk5FAH9dgsx3BlUer73nIhuj+9OrQwEbLTPOBzGkL1lsFCR01am+v+0m2Cmbs1nP12hLDl5FA7EszKA== + dependencies: + minimatch "^3.0.5" + +regenerate-unicode-properties@^10.1.0: + version "10.1.0" + resolved "https://registry.npmmirror.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz" + integrity sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ== + dependencies: + regenerate "^1.4.2" + +regenerate-unicode-properties@^9.0.0: + version "9.0.0" + resolved "https://registry.npmmirror.com/regenerate-unicode-properties/-/regenerate-unicode-properties-9.0.0.tgz" + integrity sha512-3E12UeNSPfjrgwjkR81m5J7Aw/T55Tu7nUyZVQYCKEOs+2dkxEY+DpPtZzO4YruuiPb7NkYLVcyJC4+zCbk5pA== + dependencies: + regenerate "^1.4.2" + +regenerate@^1.4.2: + version "1.4.2" + resolved "https://registry.npmmirror.com/regenerate/-/regenerate-1.4.2.tgz" + integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== + +regenerator-runtime@^0.13.10: + version "0.13.10" + resolved "https://registry.npmmirror.com/regenerator-runtime/-/regenerator-runtime-0.13.10.tgz" + integrity sha512-KepLsg4dU12hryUO7bp/axHAKvwGOCV0sGloQtpagJ12ai+ojVDqkeGSiRX1zlq+kjIMZ1t7gpze+26QqtdGqw== + +regenerator-transform@^0.15.0: + version "0.15.0" + resolved "https://registry.npmmirror.com/regenerator-transform/-/regenerator-transform-0.15.0.tgz" + integrity sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg== + dependencies: + "@babel/runtime" "^7.8.4" + +regexp.prototype.flags@^1.4.3: + version "1.4.3" + resolved "https://registry.npmmirror.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz" + integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + functions-have-names "^1.2.2" + +regexpu-core@^4.5.4: + version "4.8.0" + resolved "https://registry.npmmirror.com/regexpu-core/-/regexpu-core-4.8.0.tgz" + integrity sha512-1F6bYsoYiz6is+oz70NWur2Vlh9KWtswuRuzJOfeYUrfPX2o8n74AnUVaOGDbUqVGO9fNHu48/pjJO4sNVwsOg== + dependencies: + regenerate "^1.4.2" + regenerate-unicode-properties "^9.0.0" + regjsgen "^0.5.2" + regjsparser "^0.7.0" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.0.0" + +regexpu-core@^5.1.0: + version "5.2.1" + resolved "https://registry.npmmirror.com/regexpu-core/-/regexpu-core-5.2.1.tgz" + integrity sha512-HrnlNtpvqP1Xkb28tMhBUO2EbyUHdQlsnlAhzWcwHy8WJR53UWr7/MAvqrsQKMbV4qdpv03oTMG8iIhfsPFktQ== + dependencies: + regenerate "^1.4.2" + 
regenerate-unicode-properties "^10.1.0" + regjsgen "^0.7.1" + regjsparser "^0.9.1" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.0.0" + +registry-auth-token@^4.0.0: + version "4.2.2" + resolved "https://registry.npmmirror.com/registry-auth-token/-/registry-auth-token-4.2.2.tgz" + integrity sha512-PC5ZysNb42zpFME6D/XlIgtNGdTl8bBOCw90xQLVMpzuuubJKYDWFAEuUNc+Cn8Z8724tg2SDhDRrkVEsqfDMg== + dependencies: + rc "1.2.8" + +registry-url@^5.0.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/registry-url/-/registry-url-5.1.0.tgz" + integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw== + dependencies: + rc "^1.2.8" + +regjsgen@^0.5.2: + version "0.5.2" + resolved "https://registry.npmmirror.com/regjsgen/-/regjsgen-0.5.2.tgz" + integrity sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A== + +regjsgen@^0.7.1: + version "0.7.1" + resolved "https://registry.npmmirror.com/regjsgen/-/regjsgen-0.7.1.tgz" + integrity sha512-RAt+8H2ZEzHeYWxZ3H2z6tF18zyyOnlcdaafLrm21Bguj7uZy6ULibiAFdXEtKQY4Sy7wDTwDiOazasMLc4KPA== + +regjsparser@^0.7.0: + version "0.7.0" + resolved "https://registry.npmmirror.com/regjsparser/-/regjsparser-0.7.0.tgz" + integrity sha512-A4pcaORqmNMDVwUjWoTzuhwMGpP+NykpfqAsEgI1FSH/EzC7lrN5TMd+kN8YCovX+jMpu8eaqXgXPCa0g8FQNQ== + dependencies: + jsesc "~0.5.0" + +regjsparser@^0.9.1: + version "0.9.1" + resolved "https://registry.npmmirror.com/regjsparser/-/regjsparser-0.9.1.tgz" + integrity sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== + dependencies: + jsesc "~0.5.0" + +rehype-katex@4: + version "4.0.0" + resolved "https://registry.npmmirror.com/rehype-katex/-/rehype-katex-4.0.0.tgz" + integrity sha512-0mgBqYugQyIW0eUl6RDOZ28Cat2YzrnWGaYgKCMQnJw6ClmKgLqXBnkDAPGh2mwxvkkKwQOUMUpSLpA5rt7rzA== + dependencies: + "@types/katex" "^0.11.0" + hast-util-to-text "^2.0.0" + katex "^0.12.0" + rehype-parse "^7.0.0" + unified "^9.0.0" + unist-util-visit "^2.0.0" + +rehype-parse@^6.0.2: + version "6.0.2" + resolved "https://registry.npmmirror.com/rehype-parse/-/rehype-parse-6.0.2.tgz" + integrity sha512-0S3CpvpTAgGmnz8kiCyFLGuW5yA4OQhyNTm/nwPopZ7+PI11WnGl1TTWTGv/2hPEe/g2jRLlhVVSsoDH8waRug== + dependencies: + hast-util-from-parse5 "^5.0.0" + parse5 "^5.0.0" + xtend "^4.0.0" + +rehype-parse@^7.0.0: + version "7.0.1" + resolved "https://registry.npmmirror.com/rehype-parse/-/rehype-parse-7.0.1.tgz" + integrity sha512-fOiR9a9xH+Le19i4fGzIEowAbwG7idy2Jzs4mOrFWBSJ0sNUgy0ev871dwWnbOo371SjgjG4pwzrbgSVrKxecw== + dependencies: + hast-util-from-parse5 "^6.0.0" + parse5 "^6.0.0" + +relateurl@^0.2.7: + version "0.2.7" + resolved "https://registry.npmmirror.com/relateurl/-/relateurl-0.2.7.tgz" + integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog== + +remark-admonitions@^1.2.1: + version "1.2.1" + resolved "https://registry.npmmirror.com/remark-admonitions/-/remark-admonitions-1.2.1.tgz" + integrity sha512-Ji6p68VDvD+H1oS95Fdx9Ar5WA2wcDA4kwrrhVU7fGctC6+d3uiMICu7w7/2Xld+lnU7/gi+432+rRbup5S8ow== + dependencies: + rehype-parse "^6.0.2" + unified "^8.4.2" + unist-util-visit "^2.0.1" + +remark-emoji@^2.1.0: + version "2.2.0" + resolved "https://registry.npmmirror.com/remark-emoji/-/remark-emoji-2.2.0.tgz" + integrity sha512-P3cj9s5ggsUvWw5fS2uzCHJMGuXYRb0NnZqYlNecewXt8QBU9n5vW3DUUKOhepS8F9CwdMx9B8a3i7pqFWAI5w== + dependencies: + emoticon "^3.2.0" + node-emoji "^1.10.0" + 
unist-util-visit "^2.0.3" + +remark-footnotes@2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/remark-footnotes/-/remark-footnotes-2.0.0.tgz" + integrity sha512-3Clt8ZMH75Ayjp9q4CorNeyjwIxHFcTkaektplKGl2A1jNGEUey8cKL0ZC5vJwfcD5GFGsNLImLG/NGzWIzoMQ== + +remark-math@3: + version "3.0.1" + resolved "https://registry.npmmirror.com/remark-math/-/remark-math-3.0.1.tgz" + integrity sha512-epT77R/HK0x7NqrWHdSV75uNLwn8g9qTyMqCRCDujL0vj/6T6+yhdrR7mjELWtkse+Fw02kijAaBuVcHBor1+Q== + +remark-mdx-remove-exports@^1.6.22: + version "1.6.22" + resolved "https://registry.npmmirror.com/remark-mdx-remove-exports/-/remark-mdx-remove-exports-1.6.22.tgz" + integrity sha512-7g2uiTmTGfz5QyVb+toeX25frbk1Y6yd03RXGPtqx0+DVh86Gb7MkNYbk7H2X27zdZ3CQv1W/JqlFO0Oo8IxVA== + dependencies: + unist-util-remove "2.0.0" + +remark-mdx-remove-imports@^1.6.22: + version "1.6.22" + resolved "https://registry.npmmirror.com/remark-mdx-remove-imports/-/remark-mdx-remove-imports-1.6.22.tgz" + integrity sha512-lmjAXD8Ltw0TsvBzb45S+Dxx7LTJAtDaMneMAv8LAUIPEyYoKkmGbmVsiF0/pY6mhM1Q16swCmu1TN+ie/vn/A== + dependencies: + unist-util-remove "2.0.0" + +remark-mdx@1.6.22: + version "1.6.22" + resolved "https://registry.npmmirror.com/remark-mdx/-/remark-mdx-1.6.22.tgz" + integrity sha512-phMHBJgeV76uyFkH4rvzCftLfKCr2RZuF+/gmVcaKrpsihyzmhXjA0BEMDaPTXG5y8qZOKPVo83NAOX01LPnOQ== + dependencies: + "@babel/core" "7.12.9" + "@babel/helper-plugin-utils" "7.10.4" + "@babel/plugin-proposal-object-rest-spread" "7.12.1" + "@babel/plugin-syntax-jsx" "7.12.1" + "@mdx-js/util" "1.6.22" + is-alphabetical "1.0.4" + remark-parse "8.0.3" + unified "9.2.0" + +remark-parse@8.0.3: + version "8.0.3" + resolved "https://registry.npmmirror.com/remark-parse/-/remark-parse-8.0.3.tgz" + integrity sha512-E1K9+QLGgggHxCQtLt++uXltxEprmWzNfg+MxpfHsZlrddKzZ/hZyWHDbK3/Ap8HJQqYJRXP+jHczdL6q6i85Q== + dependencies: + ccount "^1.0.0" + collapse-white-space "^1.0.2" + is-alphabetical "^1.0.0" + is-decimal "^1.0.0" + is-whitespace-character "^1.0.0" + is-word-character "^1.0.0" + markdown-escapes "^1.0.0" + parse-entities "^2.0.0" + repeat-string "^1.5.4" + state-toggle "^1.0.0" + trim "0.0.1" + trim-trailing-lines "^1.0.0" + unherit "^1.0.4" + unist-util-remove-position "^2.0.0" + vfile-location "^3.0.0" + xtend "^4.0.1" + +remark-squeeze-paragraphs@4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/remark-squeeze-paragraphs/-/remark-squeeze-paragraphs-4.0.0.tgz" + integrity sha512-8qRqmL9F4nuLPIgl92XUuxI3pFxize+F1H0e/W3llTk0UsjJaj01+RrirkMw7P21RKe4X6goQhYRSvNWX+70Rw== + dependencies: + mdast-squeeze-paragraphs "^4.0.0" + +renderkid@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/renderkid/-/renderkid-3.0.0.tgz" + integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== + dependencies: + css-select "^4.1.3" + dom-converter "^0.2.0" + htmlparser2 "^6.1.0" + lodash "^4.17.21" + strip-ansi "^6.0.1" + +repeat-string@^1.0.0, repeat-string@^1.5.4: + version "1.6.1" + resolved "https://registry.npmmirror.com/repeat-string/-/repeat-string-1.6.1.tgz" + integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w== + +require-from-string@^2.0.2: + version "2.0.2" + resolved "https://registry.npmmirror.com/require-from-string/-/require-from-string-2.0.2.tgz" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +"require-like@>= 0.1.1": + version "0.1.2" + resolved 
"https://registry.npmmirror.com/require-like/-/require-like-0.1.2.tgz" + integrity sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A== + +requires-port@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/requires-port/-/requires-port-1.0.0.tgz" + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + +resolve-alpn@^1.0.0: + version "1.2.1" + resolved "https://registry.npmmirror.com/resolve-alpn/-/resolve-alpn-1.2.1.tgz" + integrity sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g== + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/resolve-from/-/resolve-from-4.0.0.tgz" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-pathname@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/resolve-pathname/-/resolve-pathname-3.0.0.tgz" + integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng== + +resolve@^1.1.6, resolve@^1.14.2, resolve@^1.3.2: + version "1.22.1" + resolved "https://registry.npmmirror.com/resolve/-/resolve-1.22.1.tgz" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +responselike@^2.0.0: + version "2.0.1" + resolved "https://registry.npmmirror.com/responselike/-/responselike-2.0.1.tgz" + integrity sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw== + dependencies: + lowercase-keys "^2.0.0" + +retry@^0.13.1: + version "0.13.1" + resolved "https://registry.npmmirror.com/retry/-/retry-0.13.1.tgz" + integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.npmmirror.com/reusify/-/reusify-1.0.4.tgz" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.npmmirror.com/rimraf/-/rimraf-3.0.2.tgz" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +rtl-detect@^1.0.4: + version "1.0.4" + resolved "https://registry.npmmirror.com/rtl-detect/-/rtl-detect-1.0.4.tgz" + integrity sha512-EBR4I2VDSSYr7PkBmFy04uhycIpDKp+21p/jARYXlCSjQksTBQcJ0HFUPOO79EPPH5JS6VAhiIQbycf0O3JAxQ== + +rtlcss@^3.3.0: + version "3.5.0" + resolved "https://registry.npmmirror.com/rtlcss/-/rtlcss-3.5.0.tgz" + integrity sha512-wzgMaMFHQTnyi9YOwsx9LjOxYXJPzS8sYnFaKm6R5ysvTkwzHiB0vxnbHwchHQT65PTdBjDG21/kQBWI7q9O7A== + dependencies: + find-up "^5.0.0" + picocolors "^1.0.0" + postcss "^8.3.11" + strip-json-comments "^3.1.1" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.npmmirror.com/run-parallel/-/run-parallel-1.2.0.tgz" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +rxjs@^7.5.4: + version "7.5.7" + resolved "https://registry.npmmirror.com/rxjs/-/rxjs-7.5.7.tgz" + integrity sha512-z9MzKh/UcOqB3i20H6rtrlaE/CgjLOvheWK/9ILrbhROGTweAi1BaFsTT9FbwZi5Trr1qNRs+MXkhmR06awzQA== + dependencies: + tslib "^2.1.0" + +safe-buffer@5.1.2, safe-buffer@~5.1.0, 
safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.npmmirror.com/safe-buffer/-/safe-buffer-5.1.2.tgz" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.npmmirror.com/safe-buffer/-/safe-buffer-5.2.1.tgz" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-regex-test@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/safe-regex-test/-/safe-regex-test-1.0.0.tgz" + integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.3" + is-regex "^1.1.4" + +"safer-buffer@>= 2.1.2 < 3": + version "2.1.2" + resolved "https://registry.npmmirror.com/safer-buffer/-/safer-buffer-2.1.2.tgz" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sax@^1.2.4, sax@~1.2.4: + version "1.2.4" + resolved "https://registry.npmmirror.com/sax/-/sax-1.2.4.tgz" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +scheduler@^0.20.2: + version "0.20.2" + resolved "https://registry.npmmirror.com/scheduler/-/scheduler-0.20.2.tgz" + integrity sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + +schema-utils@2.7.0: + version "2.7.0" + resolved "https://registry.npmmirror.com/schema-utils/-/schema-utils-2.7.0.tgz" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== + dependencies: + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" + ajv-keywords "^3.4.1" + +schema-utils@^2.6.5: + version "2.7.1" + resolved "https://registry.npmmirror.com/schema-utils/-/schema-utils-2.7.1.tgz" + integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== + dependencies: + "@types/json-schema" "^7.0.5" + ajv "^6.12.4" + ajv-keywords "^3.5.2" + +schema-utils@^3.0.0, schema-utils@^3.1.1: + version "3.1.1" + resolved "https://registry.npmmirror.com/schema-utils/-/schema-utils-3.1.1.tgz" + integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^3.2.0: + version "3.3.0" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz" + integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/schema-utils/-/schema-utils-4.0.0.tgz" + integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.8.0" + ajv-formats "^2.1.1" + ajv-keywords "^5.0.0" + +section-matter@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/section-matter/-/section-matter-1.0.0.tgz" + integrity sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA== + dependencies: + extend-shallow "^2.0.1" + kind-of "^6.0.0" + +select-hose@^2.0.0: + 
version "2.0.0" + resolved "https://registry.npmmirror.com/select-hose/-/select-hose-2.0.0.tgz" + integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg== + +selfsigned@^2.1.1: + version "2.1.1" + resolved "https://registry.npmmirror.com/selfsigned/-/selfsigned-2.1.1.tgz" + integrity sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ== + dependencies: + node-forge "^1" + +semver-diff@^3.1.1: + version "3.1.1" + resolved "https://registry.npmmirror.com/semver-diff/-/semver-diff-3.1.1.tgz" + integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg== + dependencies: + semver "^6.3.0" + +semver@^5.4.1: + version "5.7.2" + resolved "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz" + integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== + +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0: + version "6.3.1" + resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + +semver@^7.3.2, semver@^7.3.4, semver@^7.3.5: + version "7.5.4" + resolved "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz" + integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== + dependencies: + lru-cache "^6.0.0" + +send@0.18.0: + version "0.18.0" + resolved "https://registry.npmmirror.com/send/-/send-0.18.0.tgz" + integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms "2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + +serialize-javascript@^6.0.0, serialize-javascript@^6.0.1: + version "6.0.2" + resolved "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz" + integrity sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g== + dependencies: + randombytes "^2.1.0" + +serve-handler@^6.1.3: + version "6.1.5" + resolved "https://registry.npmmirror.com/serve-handler/-/serve-handler-6.1.5.tgz" + integrity sha512-ijPFle6Hwe8zfmBxJdE+5fta53fdIY0lHISJvuikXB3VYFafRjMRpOffSPvCYsbKyBA7pvy9oYr/BT1O3EArlg== + dependencies: + bytes "3.0.0" + content-disposition "0.5.2" + fast-url-parser "1.1.3" + mime-types "2.1.18" + minimatch "3.1.2" + path-is-inside "1.0.2" + path-to-regexp "2.2.1" + range-parser "1.2.0" + +serve-index@^1.9.1: + version "1.9.1" + resolved "https://registry.npmmirror.com/serve-index/-/serve-index-1.9.1.tgz" + integrity sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw== + dependencies: + accepts "~1.3.4" + batch "0.6.1" + debug "2.6.9" + escape-html "~1.0.3" + http-errors "~1.6.2" + mime-types "~2.1.17" + parseurl "~1.3.2" + +serve-static@1.15.0: + version "1.15.0" + resolved "https://registry.npmmirror.com/serve-static/-/serve-static-1.15.0.tgz" + integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.18.0" + +setimmediate@^1.0.5: + version "1.0.5" + resolved "https://registry.npmmirror.com/setimmediate/-/setimmediate-1.0.5.tgz" + 
integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA== + +setprototypeof@1.1.0: + version "1.1.0" + resolved "https://registry.npmmirror.com/setprototypeof/-/setprototypeof-1.1.0.tgz" + integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== + +setprototypeof@1.2.0: + version "1.2.0" + resolved "https://registry.npmmirror.com/setprototypeof/-/setprototypeof-1.2.0.tgz" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +shallow-clone@^3.0.0: + version "3.0.1" + resolved "https://registry.npmmirror.com/shallow-clone/-/shallow-clone-3.0.1.tgz" + integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== + dependencies: + kind-of "^6.0.2" + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/shebang-command/-/shebang-command-2.0.0.tgz" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/shebang-regex/-/shebang-regex-3.0.0.tgz" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +shell-quote@^1.7.2: + version "1.7.4" + resolved "https://registry.npmmirror.com/shell-quote/-/shell-quote-1.7.4.tgz" + integrity sha512-8o/QEhSSRb1a5i7TFR0iM4G16Z0vYB2OQVs4G3aAFXjn3T6yEx8AZxy1PgDF7I00LZHYA3WxaSYIf5e5sAX8Rw== + +shelljs@^0.8.4, shelljs@^0.8.5: + version "0.8.5" + resolved "https://registry.npmmirror.com/shelljs/-/shelljs-0.8.5.tgz" + integrity sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow== + dependencies: + glob "^7.0.0" + interpret "^1.0.0" + rechoir "^0.6.2" + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.npmmirror.com/side-channel/-/side-channel-1.0.4.tgz" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.2, signal-exit@^3.0.3: + version "3.0.7" + resolved "https://registry.npmmirror.com/signal-exit/-/signal-exit-3.0.7.tgz" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + +sirv@^1.0.7: + version "1.0.19" + resolved "https://registry.npmmirror.com/sirv/-/sirv-1.0.19.tgz" + integrity sha512-JuLThK3TnZG1TAKDwNIqNq6QA2afLOCcm+iE8D1Kj3GA40pSPsxQjjJl0J8X3tsR7T+CP1GavpzLwYkgVLWrZQ== + dependencies: + "@polka/url" "^1.0.0-next.20" + mrmime "^1.0.0" + totalist "^1.0.0" + +sisteransi@^1.0.5: + version "1.0.5" + resolved "https://registry.npmmirror.com/sisteransi/-/sisteransi-1.0.5.tgz" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +sitemap@^7.0.0: + version "7.1.1" + resolved "https://registry.npmmirror.com/sitemap/-/sitemap-7.1.1.tgz" + integrity sha512-mK3aFtjz4VdJN0igpIJrinf3EO8U8mxOPsTBzSsy06UtjZQJ3YY3o3Xa7zSc5nMqcMrRwlChHZ18Kxg0caiPBg== + dependencies: + "@types/node" "^17.0.5" + "@types/sax" "^1.2.1" + arg "^5.0.0" + sax "^1.2.4" + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/slash/-/slash-3.0.0.tgz" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +sockjs@^0.3.24: + version "0.3.24" 
+ resolved "https://registry.npmmirror.com/sockjs/-/sockjs-0.3.24.tgz" + integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== + dependencies: + faye-websocket "^0.11.3" + uuid "^8.3.2" + websocket-driver "^0.7.4" + +sort-css-media-queries@2.1.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/sort-css-media-queries/-/sort-css-media-queries-2.1.0.tgz" + integrity sha512-IeWvo8NkNiY2vVYdPa27MCQiR0MN0M80johAYFVxWWXQ44KU84WNxjslwBHmc/7ZL2ccwkM7/e6S5aiKZXm7jA== + +source-list-map@^2.0.0: + version "2.0.1" + resolved "https://registry.npmmirror.com/source-list-map/-/source-list-map-2.0.1.tgz" + integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== + +source-map-js@^1.0.2: + version "1.0.2" + resolved "https://registry.npmmirror.com/source-map-js/-/source-map-js-1.0.2.tgz" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +source-map-support@~0.5.20: + version "0.5.21" + resolved "https://registry.npmmirror.com/source-map-support/-/source-map-support-0.5.21.tgz" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@^0.5.0: + version "0.5.7" + resolved "https://registry.npmmirror.com/source-map/-/source-map-0.5.7.tgz" + integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== + +source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: + version "0.6.1" + resolved "https://registry.npmmirror.com/source-map/-/source-map-0.6.1.tgz" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +sourcemap-codec@^1.4.8: + version "1.4.8" + resolved "https://registry.npmmirror.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz" + integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== + +space-separated-tokens@^1.0.0: + version "1.1.5" + resolved "https://registry.npmmirror.com/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz" + integrity sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA== + +spdy-transport@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/spdy-transport/-/spdy-transport-3.0.0.tgz" + integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== + dependencies: + debug "^4.1.0" + detect-node "^2.0.4" + hpack.js "^2.1.6" + obuf "^1.1.2" + readable-stream "^3.0.6" + wbuf "^1.7.3" + +spdy@^4.0.2: + version "4.0.2" + resolved "https://registry.npmmirror.com/spdy/-/spdy-4.0.2.tgz" + integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== + dependencies: + debug "^4.1.0" + handle-thing "^2.0.0" + http-deceiver "^1.2.7" + select-hose "^2.0.0" + spdy-transport "^3.0.0" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.npmmirror.com/sprintf-js/-/sprintf-js-1.0.3.tgz" + integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== + +stable@^0.1.8: + version "0.1.8" + resolved "https://registry.npmmirror.com/stable/-/stable-0.1.8.tgz" + integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== + +state-toggle@^1.0.0: + version "1.0.3" + resolved 
"https://registry.npmmirror.com/state-toggle/-/state-toggle-1.0.3.tgz" + integrity sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ== + +statuses@2.0.1: + version "2.0.1" + resolved "https://registry.npmmirror.com/statuses/-/statuses-2.0.1.tgz" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +"statuses@>= 1.4.0 < 2": + version "1.5.0" + resolved "https://registry.npmmirror.com/statuses/-/statuses-1.5.0.tgz" + integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== + +std-env@^2.2.1: + version "2.3.1" + resolved "https://registry.npmmirror.com/std-env/-/std-env-2.3.1.tgz" + integrity sha512-eOsoKTWnr6C8aWrqJJ2KAReXoa7Vn5Ywyw6uCXgA/xDhxPoaIsBa5aNJmISY04dLwXPBnDHW4diGM7Sn5K4R/g== + dependencies: + ci-info "^3.1.1" + +std-env@^3.0.1: + version "3.3.0" + resolved "https://registry.npmmirror.com/std-env/-/std-env-3.3.0.tgz" + integrity sha512-cNNS+VYsXIs5gI6gJipO4qZ8YYT274JHvNnQ1/R/x8Q8mdP0qj0zoMchRXmBNPqp/0eOEhX+3g7g6Fgb7meLIQ== + +string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.2: + version "4.2.3" + resolved "https://registry.npmmirror.com/string-width/-/string-width-4.2.3.tgz" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string.prototype.trimend@^1.0.5: + version "1.0.5" + resolved "https://registry.npmmirror.com/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz" + integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string.prototype.trimstart@^1.0.5: + version "1.0.5" + resolved "https://registry.npmmirror.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz" + integrity sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.npmmirror.com/string_decoder/-/string_decoder-1.3.0.tgz" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.npmmirror.com/string_decoder/-/string_decoder-1.1.1.tgz" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +stringify-object@^3.3.0: + version "3.3.0" + resolved "https://registry.npmmirror.com/stringify-object/-/stringify-object-3.3.0.tgz" + integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== + dependencies: + get-own-enumerable-property-symbols "^3.0.0" + is-obj "^1.0.1" + is-regexp "^1.0.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.npmmirror.com/strip-ansi/-/strip-ansi-6.0.1.tgz" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-bom-string@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/strip-bom-string/-/strip-bom-string-1.0.0.tgz" + integrity 
sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.npmmirror.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.npmmirror.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz" + integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== + +style-to-object@0.3.0, style-to-object@^0.3.0: + version "0.3.0" + resolved "https://registry.npmmirror.com/style-to-object/-/style-to-object-0.3.0.tgz" + integrity sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA== + dependencies: + inline-style-parser "0.1.1" + +stylehacks@^5.1.1: + version "5.1.1" + resolved "https://registry.npmmirror.com/stylehacks/-/stylehacks-5.1.1.tgz" + integrity sha512-sBpcd5Hx7G6seo7b1LkpttvTz7ikD0LlH5RmdcBNb6fFR0Fl7LQwHDFr300q4cwUqi+IYrFGmsIHieMBfnN/Bw== + dependencies: + browserslist "^4.21.4" + postcss-selector-parser "^6.0.4" + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.npmmirror.com/supports-color/-/supports-color-5.5.0.tgz" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.npmmirror.com/supports-color/-/supports-color-7.2.0.tgz" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.npmmirror.com/supports-color/-/supports-color-8.1.1.tgz" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +svg-parser@^2.0.2, svg-parser@^2.0.4: + version "2.0.4" + resolved "https://registry.npmmirror.com/svg-parser/-/svg-parser-2.0.4.tgz" + integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== + +svgo@^1.2.2: + version "1.3.2" + resolved "https://registry.npmmirror.com/svgo/-/svgo-1.3.2.tgz" + integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== + dependencies: + chalk "^2.4.1" + coa "^2.0.2" + css-select "^2.0.0" + css-select-base-adapter "^0.1.1" + css-tree "1.0.0-alpha.37" + csso "^4.0.2" + js-yaml "^3.13.1" + mkdirp "~0.5.1" + object.values "^1.1.0" + sax "~1.2.4" + stable "^0.1.8" + unquote "~1.1.1" + util.promisify "~1.0.0" + +svgo@^2.7.0, svgo@^2.8.0: + version "2.8.0" + resolved "https://registry.npmmirror.com/svgo/-/svgo-2.8.0.tgz" + integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== + 
dependencies: + "@trysound/sax" "0.2.0" + commander "^7.2.0" + css-select "^4.1.3" + css-tree "^1.1.3" + csso "^4.2.0" + picocolors "^1.0.0" + stable "^0.1.8" + +tapable@^1.0.0: + version "1.1.3" + resolved "https://registry.npmmirror.com/tapable/-/tapable-1.1.3.tgz" + integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== + +tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: + version "2.2.1" + resolved "https://registry.npmmirror.com/tapable/-/tapable-2.2.1.tgz" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + +terser-webpack-plugin@^5.2.4, terser-webpack-plugin@^5.3.10: + version "5.3.10" + resolved "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz" + integrity sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w== + dependencies: + "@jridgewell/trace-mapping" "^0.3.20" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.1" + terser "^5.26.0" + +terser@^5.10.0, terser@^5.26.0: + version "5.31.5" + resolved "https://registry.npmjs.org/terser/-/terser-5.31.5.tgz" + integrity sha512-YPmas0L0rE1UyLL/llTWA0SiDOqIcAQYLeUj7cJYzXHlRTAnMSg9pPe4VJ5PlKvTrPQsdVFuiRiwyeNlYgwh2Q== + dependencies: + "@jridgewell/source-map" "^0.3.3" + acorn "^8.8.2" + commander "^2.20.0" + source-map-support "~0.5.20" + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.npmmirror.com/text-table/-/text-table-0.2.0.tgz" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +thunky@^1.0.2: + version "1.1.0" + resolved "https://registry.npmmirror.com/thunky/-/thunky-1.1.0.tgz" + integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== + +tiny-invariant@^1.0.2: + version "1.3.1" + resolved "https://registry.npmmirror.com/tiny-invariant/-/tiny-invariant-1.3.1.tgz" + integrity sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw== + +tiny-warning@^1.0.0: + version "1.0.3" + resolved "https://registry.npmmirror.com/tiny-warning/-/tiny-warning-1.0.3.tgz" + integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz" + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.npmmirror.com/to-regex-range/-/to-regex-range-5.0.1.tgz" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +toidentifier@1.0.1: + version "1.0.1" + resolved "https://registry.npmmirror.com/toidentifier/-/toidentifier-1.0.1.tgz" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +totalist@^1.0.0: + version "1.1.0" + resolved "https://registry.npmmirror.com/totalist/-/totalist-1.1.0.tgz" + integrity sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g== + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.npmmirror.com/tr46/-/tr46-0.0.3.tgz" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== + +trim-trailing-lines@^1.0.0: + version 
"1.1.4" + resolved "https://registry.npmmirror.com/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz" + integrity sha512-rjUWSqnfTNrjbB9NQWfPMH/xRK1deHeGsHoVfpxJ++XeYXE0d6B1En37AHfw3jtfTU7dzMzZL2jjpe8Qb5gLIQ== + +trim@0.0.1, trim@0.0.3, trim@^0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/trim/-/trim-0.0.3.tgz#05243a47a3a4113e6b49367880a9cca59697a20b" + integrity sha512-h82ywcYhHK7veeelXrCScdH7HkWfbIT1D/CgYO+nmDarz3SGNssVBMws6jU16Ga60AJCRAvPV6w6RLuNerQqjg== + +trough@^1.0.0: + version "1.0.5" + resolved "https://registry.npmmirror.com/trough/-/trough-1.0.5.tgz" + integrity sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA== + +tslib@^2.0.3, tslib@^2.1.0, tslib@^2.2.0, tslib@^2.3.1, tslib@^2.4.0: + version "2.4.1" + resolved "https://registry.npmmirror.com/tslib/-/tslib-2.4.1.tgz" + integrity sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA== + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.npmmirror.com/type-fest/-/type-fest-0.20.2.tgz" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +type-is@~1.6.18: + version "1.6.18" + resolved "https://registry.npmmirror.com/type-is/-/type-is-1.6.18.tgz" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +typedarray-to-buffer@^3.1.5: + version "3.1.5" + resolved "https://registry.npmmirror.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz" + integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + dependencies: + is-typedarray "^1.0.0" + +ua-parser-js@^0.7.30: + version "0.7.33" + resolved "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.33.tgz" + integrity sha512-s8ax/CeZdK9R/56Sui0WM6y9OFREJarMRHqLB2EwkovemBxNQ+Bqu8GAsUnVcXKgphb++ghr/B2BZx4mahujPw== + +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.npmmirror.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + +unherit@^1.0.4: + version "1.1.3" + resolved "https://registry.npmmirror.com/unherit/-/unherit-1.1.3.tgz" + integrity sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ== + dependencies: + inherits "^2.0.0" + xtend "^4.0.0" + +unicode-canonical-property-names-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz" + integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== + +unicode-match-property-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz" + integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== + dependencies: + unicode-canonical-property-names-ecmascript "^2.0.0" + unicode-property-aliases-ecmascript "^2.0.0" + +unicode-match-property-value-ecmascript@^2.0.0: + version "2.0.0" + resolved 
"https://registry.npmmirror.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz" + integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw== + +unicode-property-aliases-ecmascript@^2.0.0: + version "2.1.0" + resolved "https://registry.npmmirror.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz" + integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== + +unified@9.2.0, unified@^9.0.0: + version "9.2.0" + resolved "https://registry.npmmirror.com/unified/-/unified-9.2.0.tgz" + integrity sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg== + dependencies: + bail "^1.0.0" + extend "^3.0.0" + is-buffer "^2.0.0" + is-plain-obj "^2.0.0" + trough "^1.0.0" + vfile "^4.0.0" + +unified@^8.4.2: + version "8.4.2" + resolved "https://registry.npmmirror.com/unified/-/unified-8.4.2.tgz" + integrity sha512-JCrmN13jI4+h9UAyKEoGcDZV+i1E7BLFuG7OsaDvTXI5P0qhHX+vZO/kOhz9jn8HGENDKbwSeB0nVOg4gVStGA== + dependencies: + bail "^1.0.0" + extend "^3.0.0" + is-plain-obj "^2.0.0" + trough "^1.0.0" + vfile "^4.0.0" + +unique-string@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/unique-string/-/unique-string-2.0.0.tgz" + integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== + dependencies: + crypto-random-string "^2.0.0" + +unist-builder@2.0.3, unist-builder@^2.0.0: + version "2.0.3" + resolved "https://registry.npmmirror.com/unist-builder/-/unist-builder-2.0.3.tgz" + integrity sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw== + +unist-util-find-after@^3.0.0: + version "3.0.0" + resolved "https://registry.npmmirror.com/unist-util-find-after/-/unist-util-find-after-3.0.0.tgz" + integrity sha512-ojlBqfsBftYXExNu3+hHLfJQ/X1jYY/9vdm4yZWjIbf0VuWF6CRufci1ZyoD/wV2TYMKxXUoNuoqwy+CkgzAiQ== + dependencies: + unist-util-is "^4.0.0" + +unist-util-generated@^1.0.0: + version "1.1.6" + resolved "https://registry.npmmirror.com/unist-util-generated/-/unist-util-generated-1.1.6.tgz" + integrity sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg== + +unist-util-is@^4.0.0: + version "4.1.0" + resolved "https://registry.npmmirror.com/unist-util-is/-/unist-util-is-4.1.0.tgz" + integrity sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg== + +unist-util-position@^3.0.0: + version "3.1.0" + resolved "https://registry.npmmirror.com/unist-util-position/-/unist-util-position-3.1.0.tgz" + integrity sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA== + +unist-util-remove-position@^2.0.0: + version "2.0.1" + resolved "https://registry.npmmirror.com/unist-util-remove-position/-/unist-util-remove-position-2.0.1.tgz" + integrity sha512-fDZsLYIe2uT+oGFnuZmy73K6ZxOPG/Qcm+w7jbEjaFcJgbQ6cqjs/eSPzXhsmGpAsWPkqZM9pYjww5QTn3LHMA== + dependencies: + unist-util-visit "^2.0.0" + +unist-util-remove@2.0.0, unist-util-remove@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/unist-util-remove/-/unist-util-remove-2.0.0.tgz" + integrity sha512-HwwWyNHKkeg/eXRnE11IpzY8JT55JNM1YCwwU9YNCnfzk6s8GhPXrVBBZWiwLeATJbI7euvoGSzcy9M29UeW3g== + dependencies: + unist-util-is "^4.0.0" + +unist-util-stringify-position@^2.0.0: + version "2.0.3" + resolved 
"https://registry.npmmirror.com/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz" + integrity sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g== + dependencies: + "@types/unist" "^2.0.2" + +unist-util-visit-parents@^3.0.0: + version "3.1.1" + resolved "https://registry.npmmirror.com/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz" + integrity sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg== + dependencies: + "@types/unist" "^2.0.0" + unist-util-is "^4.0.0" + +unist-util-visit@2.0.3, unist-util-visit@^2.0.0, unist-util-visit@^2.0.1, unist-util-visit@^2.0.2, unist-util-visit@^2.0.3: + version "2.0.3" + resolved "https://registry.npmmirror.com/unist-util-visit/-/unist-util-visit-2.0.3.tgz" + integrity sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q== + dependencies: + "@types/unist" "^2.0.0" + unist-util-is "^4.0.0" + unist-util-visit-parents "^3.0.0" + +universalify@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/universalify/-/universalify-2.0.0.tgz" + integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.npmmirror.com/unpipe/-/unpipe-1.0.0.tgz" + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + +unquote@~1.1.1: + version "1.1.1" + resolved "https://registry.npmmirror.com/unquote/-/unquote-1.1.1.tgz" + integrity sha512-vRCqFv6UhXpWxZPyGDh/F3ZpNv8/qo7w6iufLpQg9aKnQ71qM4B5KiI7Mia9COcjEhrO9LueHpMYjYzsWH3OIg== + +update-browserslist-db@^1.1.0: + version "1.1.0" + resolved "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz" + integrity sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ== + dependencies: + escalade "^3.1.2" + picocolors "^1.0.1" + +update-notifier@^5.1.0: + version "5.1.0" + resolved "https://registry.npmmirror.com/update-notifier/-/update-notifier-5.1.0.tgz" + integrity sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw== + dependencies: + boxen "^5.0.0" + chalk "^4.1.0" + configstore "^5.0.1" + has-yarn "^2.1.0" + import-lazy "^2.1.0" + is-ci "^2.0.0" + is-installed-globally "^0.4.0" + is-npm "^5.0.0" + is-yarn-global "^0.3.0" + latest-version "^5.1.0" + pupa "^2.1.1" + semver "^7.3.4" + semver-diff "^3.1.1" + xdg-basedir "^4.0.0" + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.npmmirror.com/uri-js/-/uri-js-4.4.1.tgz" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +url-loader@^4.1.1: + version "4.1.1" + resolved "https://registry.npmmirror.com/url-loader/-/url-loader-4.1.1.tgz" + integrity sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA== + dependencies: + loader-utils "^2.0.0" + mime-types "^2.1.27" + schema-utils "^3.0.0" + +use-composed-ref@^1.3.0: + version "1.3.0" + resolved "https://registry.npmmirror.com/use-composed-ref/-/use-composed-ref-1.3.0.tgz" + integrity sha512-GLMG0Jc/jiKov/3Ulid1wbv3r54K9HlMW29IWcDFPEqFkSO2nS0MuefWgMJpeHQ9YJeXDL3ZUF+P3jdXlZX/cQ== + +use-isomorphic-layout-effect@^1.1.1: + version "1.1.2" + resolved 
"https://registry.npmmirror.com/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.1.2.tgz" + integrity sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA== + +use-latest@^1.2.1: + version "1.2.1" + resolved "https://registry.npmmirror.com/use-latest/-/use-latest-1.2.1.tgz" + integrity sha512-xA+AVm/Wlg3e2P/JiItTziwS7FK92LWrDB0p+hgXloIMuVCeJJ8v6f0eeHyPZaJrM+usM1FkFfbNCrJGs8A/zw== + dependencies: + use-isomorphic-layout-effect "^1.1.1" + +util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.npmmirror.com/util-deprecate/-/util-deprecate-1.0.2.tgz" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +util.promisify@~1.0.0: + version "1.0.1" + resolved "https://registry.npmmirror.com/util.promisify/-/util.promisify-1.0.1.tgz" + integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.2" + has-symbols "^1.0.1" + object.getownpropertydescriptors "^2.1.0" + +utila@~0.4: + version "0.4.0" + resolved "https://registry.npmmirror.com/utila/-/utila-0.4.0.tgz" + integrity sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA== + +utility-types@^3.10.0: + version "3.10.0" + resolved "https://registry.npmmirror.com/utility-types/-/utility-types-3.10.0.tgz" + integrity sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg== + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.npmmirror.com/utils-merge/-/utils-merge-1.0.1.tgz" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + +uuid@^8.3.2: + version "8.3.2" + resolved "https://registry.npmmirror.com/uuid/-/uuid-8.3.2.tgz" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== + +value-equal@^1.0.1: + version "1.0.1" + resolved "https://registry.npmmirror.com/value-equal/-/value-equal-1.0.1.tgz" + integrity sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw== + +vary@~1.1.2: + version "1.1.2" + resolved "https://registry.npmmirror.com/vary/-/vary-1.1.2.tgz" + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + +vfile-location@^3.0.0, vfile-location@^3.2.0: + version "3.2.0" + resolved "https://registry.npmmirror.com/vfile-location/-/vfile-location-3.2.0.tgz" + integrity sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA== + +vfile-message@^2.0.0: + version "2.0.4" + resolved "https://registry.npmmirror.com/vfile-message/-/vfile-message-2.0.4.tgz" + integrity sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ== + dependencies: + "@types/unist" "^2.0.0" + unist-util-stringify-position "^2.0.0" + +vfile@^4.0.0: + version "4.2.1" + resolved "https://registry.npmmirror.com/vfile/-/vfile-4.2.1.tgz" + integrity sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA== + dependencies: + "@types/unist" "^2.0.0" + is-buffer "^2.0.0" + unist-util-stringify-position "^2.0.0" + vfile-message "^2.0.0" + +wait-on@^6.0.0: + version "6.0.1" + resolved "https://registry.npmmirror.com/wait-on/-/wait-on-6.0.1.tgz" + integrity 
sha512-zht+KASY3usTY5u2LgaNqn/Cd8MukxLGjdcZxT2ns5QzDmTFc4XoWBgC+C/na+sMRZTuVygQoMYwdcVjHnYIVw== + dependencies: + axios "^0.25.0" + joi "^17.6.0" + lodash "^4.17.21" + minimist "^1.2.5" + rxjs "^7.5.4" + +watchpack@^2.4.1: + version "2.4.1" + resolved "https://registry.npmjs.org/watchpack/-/watchpack-2.4.1.tgz" + integrity sha512-8wrBCMtVhqcXP2Sup1ctSkga6uc2Bx0IIvKyT7yTFier5AXHooSI+QyQQAtTb7+E0IUCCKyTFmXqdqgum2XWGg== + dependencies: + glob-to-regexp "^0.4.1" + graceful-fs "^4.1.2" + +wbuf@^1.1.0, wbuf@^1.7.3: + version "1.7.3" + resolved "https://registry.npmmirror.com/wbuf/-/wbuf-1.7.3.tgz" + integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== + dependencies: + minimalistic-assert "^1.0.0" + +web-namespaces@^1.0.0, web-namespaces@^1.1.2: + version "1.1.4" + resolved "https://registry.npmmirror.com/web-namespaces/-/web-namespaces-1.1.4.tgz" + integrity sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw== + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.npmmirror.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== + +webpack-bundle-analyzer@^4.4.2: + version "4.7.0" + resolved "https://registry.npmmirror.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.7.0.tgz" + integrity sha512-j9b8ynpJS4K+zfO5GGwsAcQX4ZHpWV+yRiHDiL+bE0XHJ8NiPYLTNVQdlFYWxtpg9lfAQNlwJg16J9AJtFSXRg== + dependencies: + acorn "^8.0.4" + acorn-walk "^8.0.0" + chalk "^4.1.0" + commander "^7.2.0" + gzip-size "^6.0.0" + lodash "^4.17.20" + opener "^1.5.2" + sirv "^1.0.7" + ws "^7.3.1" + +webpack-dev-middleware@^5.3.1: + version "5.3.3" + resolved "https://registry.npmmirror.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz" + integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA== + dependencies: + colorette "^2.0.10" + memfs "^3.4.3" + mime-types "^2.1.31" + range-parser "^1.2.1" + schema-utils "^4.0.0" + +webpack-dev-server@^4.4.0: + version "4.11.1" + resolved "https://registry.npmmirror.com/webpack-dev-server/-/webpack-dev-server-4.11.1.tgz" + integrity sha512-lILVz9tAUy1zGFwieuaQtYiadImb5M3d+H+L1zDYalYoDl0cksAB1UNyuE5MMWJrG6zR1tXkCP2fitl7yoUJiw== + dependencies: + "@types/bonjour" "^3.5.9" + "@types/connect-history-api-fallback" "^1.3.5" + "@types/express" "^4.17.13" + "@types/serve-index" "^1.9.1" + "@types/serve-static" "^1.13.10" + "@types/sockjs" "^0.3.33" + "@types/ws" "^8.5.1" + ansi-html-community "^0.0.8" + bonjour-service "^1.0.11" + chokidar "^3.5.3" + colorette "^2.0.10" + compression "^1.7.4" + connect-history-api-fallback "^2.0.0" + default-gateway "^6.0.3" + express "^4.17.3" + graceful-fs "^4.2.6" + html-entities "^2.3.2" + http-proxy-middleware "^2.0.3" + ipaddr.js "^2.0.1" + open "^8.0.9" + p-retry "^4.5.0" + rimraf "^3.0.2" + schema-utils "^4.0.0" + selfsigned "^2.1.1" + serve-index "^1.9.1" + sockjs "^0.3.24" + spdy "^4.0.2" + webpack-dev-middleware "^5.3.1" + ws "^8.4.2" + +webpack-merge@^5.8.0: + version "5.8.0" + resolved "https://registry.npmmirror.com/webpack-merge/-/webpack-merge-5.8.0.tgz" + integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q== + dependencies: + clone-deep "^4.0.1" + wildcard "^2.0.0" + +webpack-sources@^1.1.0: + version "1.4.3" + resolved "https://registry.npmmirror.com/webpack-sources/-/webpack-sources-1.4.3.tgz" + integrity 
sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== + dependencies: + source-list-map "^2.0.0" + source-map "~0.6.1" + +webpack-sources@^3.2.2, webpack-sources@^3.2.3: + version "3.2.3" + resolved "https://registry.npmmirror.com/webpack-sources/-/webpack-sources-3.2.3.tgz" + integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== + +webpack@^5.61.0, webpack@^5.73.0: + version "5.93.0" + resolved "https://registry.npmjs.org/webpack/-/webpack-5.93.0.tgz" + integrity sha512-Y0m5oEY1LRuwly578VqluorkXbvXKh7U3rLoQCEO04M97ScRr44afGVkI0FQFsXzysk5OgFAxjZAb9rsGQVihA== + dependencies: + "@types/eslint-scope" "^3.7.3" + "@types/estree" "^1.0.5" + "@webassemblyjs/ast" "^1.12.1" + "@webassemblyjs/wasm-edit" "^1.12.1" + "@webassemblyjs/wasm-parser" "^1.12.1" + acorn "^8.7.1" + acorn-import-attributes "^1.9.5" + browserslist "^4.21.10" + chrome-trace-event "^1.0.2" + enhanced-resolve "^5.17.0" + es-module-lexer "^1.2.1" + eslint-scope "5.1.1" + events "^3.2.0" + glob-to-regexp "^0.4.1" + graceful-fs "^4.2.11" + json-parse-even-better-errors "^2.3.1" + loader-runner "^4.2.0" + mime-types "^2.1.27" + neo-async "^2.6.2" + schema-utils "^3.2.0" + tapable "^2.1.1" + terser-webpack-plugin "^5.3.10" + watchpack "^2.4.1" + webpack-sources "^3.2.3" + +webpackbar@^5.0.0-3: + version "5.0.2" + resolved "https://registry.npmmirror.com/webpackbar/-/webpackbar-5.0.2.tgz" + integrity sha512-BmFJo7veBDgQzfWXl/wwYXr/VFus0614qZ8i9znqcl9fnEdiVkdbi0TedLQ6xAK92HZHDJ0QmyQ0fmuZPAgCYQ== + dependencies: + chalk "^4.1.0" + consola "^2.15.3" + pretty-time "^1.1.0" + std-env "^3.0.1" + +websocket-driver@>=0.5.1, websocket-driver@^0.7.4: + version "0.7.4" + resolved "https://registry.npmmirror.com/websocket-driver/-/websocket-driver-0.7.4.tgz" + integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== + dependencies: + http-parser-js ">=0.5.1" + safe-buffer ">=5.1.0" + websocket-extensions ">=0.1.1" + +websocket-extensions@>=0.1.1: + version "0.1.4" + resolved "https://registry.npmmirror.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz" + integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== + +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.npmmirror.com/whatwg-url/-/whatwg-url-5.0.0.tgz" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.npmmirror.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which@^1.3.1: + version "1.3.1" + resolved "https://registry.npmmirror.com/which/-/which-1.3.1.tgz" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.npmmirror.com/which/-/which-2.0.2.tgz" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +widest-line@^3.1.0: + version "3.1.0" + resolved 
"https://registry.npmmirror.com/widest-line/-/widest-line-3.1.0.tgz" + integrity sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg== + dependencies: + string-width "^4.0.0" + +wildcard@^2.0.0: + version "2.0.0" + resolved "https://registry.npmmirror.com/wildcard/-/wildcard-2.0.0.tgz" + integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw== + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.npmmirror.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.npmmirror.com/wrappy/-/wrappy-1.0.2.tgz" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +write-file-atomic@^3.0.0: + version "3.0.3" + resolved "https://registry.npmmirror.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz" + integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== + dependencies: + imurmurhash "^0.1.4" + is-typedarray "^1.0.0" + signal-exit "^3.0.2" + typedarray-to-buffer "^3.1.5" + +ws@^7.3.1: + version "7.5.9" + resolved "https://registry.npmmirror.com/ws/-/ws-7.5.9.tgz" + integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== + +ws@^8.4.2: + version "8.10.0" + resolved "https://registry.npmmirror.com/ws/-/ws-8.10.0.tgz" + integrity sha512-+s49uSmZpvtAsd2h37vIPy1RBusaLawVe8of+GyEPsaJTCMpj/2v8NpeK1SHXjBlQ95lQTmQofOJnFiLoaN3yw== + +xdg-basedir@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz" + integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q== + +xml-js@^1.6.11: + version "1.6.11" + resolved "https://registry.npmmirror.com/xml-js/-/xml-js-1.6.11.tgz" + integrity sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g== + dependencies: + sax "^1.2.4" + +xtend@^4.0.0, xtend@^4.0.1: + version "4.0.2" + resolved "https://registry.npmmirror.com/xtend/-/xtend-4.0.2.tgz" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.npmmirror.com/yallist/-/yallist-4.0.0.tgz" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: + version "1.10.2" + resolved "https://registry.npmmirror.com/yaml/-/yaml-1.10.2.tgz" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.npmmirror.com/yocto-queue/-/yocto-queue-0.1.0.tgz" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== + +zwitch@^1.0.0: + version "1.0.5" + resolved "https://registry.npmmirror.com/zwitch/-/zwitch-1.0.5.tgz" + integrity sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw== diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d1139e4e4695bae424487fd5455def7c443d2cec --- /dev/null +++ b/frontend/README.md @@ -0,0 +1,85 @@ +# 
Frontend Application + +This frontend project aims to enhance the user experience of GPT-Researcher, providing an intuitive and efficient interface for automated research. It offers two deployment options to suit different needs and environments. + +## Option 1: Static Frontend (FastAPI) + +A lightweight solution using FastAPI to serve static files. + +#### Prerequisites +- Python 3.11+ +- pip + +#### Setup and Running + +1. Install required packages: + ``` + pip install -r requirements.txt + ``` + +2. Start the server: + ``` + python -m uvicorn main:app + ``` + +3. Access at `http://localhost:8000` + +#### Demo +https://github.com/assafelovic/gpt-researcher/assets/13554167/dd6cf08f-b31e-40c6-9907-1915f52a7110 + +## Option 2: NextJS Frontend + +A more robust solution with enhanced features and performance. + +#### Prerequisites +- Node.js (v18.17.0 recommended) +- npm + +#### Setup and Running + +1. Navigate to NextJS directory: + ``` + cd nextjs + ``` + +2. Set up Node.js: + ``` + nvm install 18.17.0 + nvm use v18.17.0 + ``` + +3. Install dependencies: + ``` + npm install --legacy-peer-deps + ``` + +4. Start development server: + ``` + npm run dev + ``` + +5. Access at `http://localhost:3000` + +Note: Requires backend server on `localhost:8000` as detailed in option 1. + +#### Demo +https://github.com/user-attachments/assets/092e9e71-7e27-475d-8c4f-9dddd28934a3 + +## Choosing an Option + +- Static Frontend: Quick setup, lightweight deployment. +- NextJS Frontend: Feature-rich, scalable, better performance and SEO. + +For production, NextJS is recommended. + +## Frontend Features + +Our frontend enhances GPT-Researcher by providing: + +1. Intuitive Research Interface: Streamlined input for research queries. +2. Real-time Progress Tracking: Visual feedback on ongoing research tasks. +3. Interactive Results Display: Easy-to-navigate presentation of findings. +4. Customizable Settings: Adjust research parameters to suit specific needs. +5. Responsive Design: Optimal experience across various devices. + +These features aim to make the research process more efficient and user-friendly, complementing GPT-Researcher's powerful agent capabilities. \ No newline at end of file diff --git a/frontend/index.html b/frontend/index.html new file mode 100644 index 0000000000000000000000000000000000000000..e022b90b90dec40c75d3a8c2e3d81346652ef724 --- /dev/null +++ b/frontend/index.html @@ -0,0 +1,204 @@ + + + + + GPT Researcher + + + + + + + + + + + + + +
+ [index.html markup not preserved in this patch view; recoverable page copy:]
+ Say Goodbye to Hours of Research
+ Say Hello to GPT Researcher, your AI mate for rapid insights and comprehensive research.
+ GPT Researcher takes care of everything from accurate source gathering and organization of research results to generation of customized reports with citations.
+ Get Started
+ Auto Agent
+ You can now do research on local documents as well. Please make sure to add the DOC_PATH env variable pointing to your documents folder.
+ Agent Output
+ An agent tailored specifically to your task will be generated to provide the most precise and relevant research results.
+ + + + + + + + + diff --git a/frontend/nextjs/.dockerignore b/frontend/nextjs/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..ee63e9e2001f615f95324cbd4c29b5174a3ddf45 --- /dev/null +++ b/frontend/nextjs/.dockerignore @@ -0,0 +1,57 @@ +.git + +# Ignore env containing secrets +.env +.venv +.envrc + +# Ignore Virtual Env +env/ +venv/ +.venv/ + +# Other Environments +ENV/ +env.bak/ +venv.bak/ + +# Ignore generated outputs +outputs/ + +# Ignore my local docs +my-docs/ + +# Ignore pycache +**/__pycache__/ + +# Ignore mypy cache +.mypy_cache/ + +# Node modules +node_modules + +# Ignore IDE config +.idea + +# macOS specific files +.DS_Store + +# Docusaurus build artifacts +.docusaurus + +# Build directories +build +docs/build + +# Language graph data +.langgraph-data/ + +# Next.js build artifacts +.next/ + +# Package lock file +package-lock.json + +# Docker-specific exclusions (if any) +Dockerfile +docker-compose.yml diff --git a/frontend/nextjs/.eslintrc.json b/frontend/nextjs/.eslintrc.json new file mode 100644 index 0000000000000000000000000000000000000000..ea782d70ab03aa5bb6423f4b092faf80d407eede --- /dev/null +++ b/frontend/nextjs/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "next/core-web-vitals" +} diff --git a/frontend/nextjs/.example.env b/frontend/nextjs/.example.env new file mode 100644 index 0000000000000000000000000000000000000000..478a5e51bb289c2a0119f8edb95cae5049ba9e0d --- /dev/null +++ b/frontend/nextjs/.example.env @@ -0,0 +1,3 @@ +TOGETHER_API_KEY= +BING_API_KEY= +HELICONE_API_KEY= diff --git a/frontend/nextjs/.gitignore b/frontend/nextjs/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..4c4ec827361a49db9065e667d608c66a06b65600 --- /dev/null +++ b/frontend/nextjs/.gitignore @@ -0,0 +1,38 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. +.env +package-lock.json + +# dependencies +/node_modules +/.pnp +.pnp.js +.yarn/install-state.gz + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# local env files +.env*.local + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts diff --git a/frontend/nextjs/.prettierrc b/frontend/nextjs/.prettierrc new file mode 100644 index 0000000000000000000000000000000000000000..a64e3591efa13f49178391bee4daa9d90b8da69d --- /dev/null +++ b/frontend/nextjs/.prettierrc @@ -0,0 +1 @@ +{ "plugins": ["prettier-plugin-tailwindcss"] } diff --git a/frontend/nextjs/Dockerfile b/frontend/nextjs/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..83be12dc498105ae4715b4fadf77bc2eaa6b5fea --- /dev/null +++ b/frontend/nextjs/Dockerfile @@ -0,0 +1,11 @@ +FROM node:18.17.0-alpine as builder +WORKDIR /app +COPY ./package.json ./ +RUN npm install --legacy-peer-deps +COPY . . +RUN npm run build + +FROM nginx +EXPOSE 3000 +COPY ./nginx/default.conf /etc/nginx/conf.d/default.conf +COPY --from=builder /app/build /usr/share/nginx/html diff --git a/frontend/nextjs/Dockerfile.dev b/frontend/nextjs/Dockerfile.dev new file mode 100644 index 0000000000000000000000000000000000000000..77e4b7823bd9a1ce2688253737138b248943f146 --- /dev/null +++ b/frontend/nextjs/Dockerfile.dev @@ -0,0 +1,6 @@ +FROM node:18.17.0-alpine +WORKDIR /app +COPY ./package.json ./ +RUN npm install --legacy-peer-deps +COPY . . 
+CMD ["npm", "run", "dev"] \ No newline at end of file diff --git a/frontend/nextjs/README.md b/frontend/nextjs/README.md new file mode 100644 index 0000000000000000000000000000000000000000..bac8f764b486dad0ab9f1c03e006cab51487cf1f --- /dev/null +++ b/frontend/nextjs/README.md @@ -0,0 +1,4 @@ +## Cloning & running + +1. Create a `.env` (use the `.example.env` for reference) and replace the API keys +2. Run `npm install --legacy-peer-deps` and `npm run dev` to install dependencies and run locally diff --git a/frontend/nextjs/actions/apiActions.ts b/frontend/nextjs/actions/apiActions.ts new file mode 100644 index 0000000000000000000000000000000000000000..45d7d8b8a3855a27af4431366763ea9d8719e211 --- /dev/null +++ b/frontend/nextjs/actions/apiActions.ts @@ -0,0 +1,108 @@ +import { createParser, ParsedEvent, ReconnectInterval } from "eventsource-parser"; + +export async function handleSourcesAndAnswer(question: string) { + let sourcesResponse = await fetch("/api/getSources", { + method: "POST", + body: JSON.stringify({ question }), + }); + let sources = await sourcesResponse.json(); + + const response = await fetch("/api/getAnswer", { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ question, sources }), + }); + + if (!response.ok) { + throw new Error(response.statusText); + } + + if (response.status === 202) { + const fullAnswer = await response.text(); + return fullAnswer; + } + + // This data is a ReadableStream + const data = response.body; + if (!data) { + return; + } + + const onParse = (event: ParsedEvent | ReconnectInterval) => { + if (event.type === "event") { + const data = event.data; + try { + const text = JSON.parse(data).text ?? ""; + return text; + } catch (e) { + console.error(e); + } + } + }; + + // https://web.dev/streams/#the-getreader-and-read-methods + const reader = data.getReader(); + const decoder = new TextDecoder(); + const parser = createParser(onParse); + let done = false; + while (!done) { + const { value, done: doneReading } = await reader.read(); + done = doneReading; + const chunkValue = decoder.decode(value); + parser.feed(chunkValue); + } +} + +export async function handleSimilarQuestions(question: string) { + let res = await fetch("/api/getSimilarQuestions", { + method: "POST", + body: JSON.stringify({ question }), + }); + let questions = await res.json(); + return questions; +} + +export async function handleLanggraphAnswer(question: string) { + const response = await fetch("/api/generateLanggraph", { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ question }), + }); + + if (!response.ok) { + throw new Error(response.statusText); + } + + // This data is a ReadableStream + const data = response.body; + if (!data) { + return; + } + + const onParse = (event: ParsedEvent | ReconnectInterval) => { + if (event.type === "event") { + const data = event.data; + try { + const text = JSON.parse(data).text ?? 
""; + return text; + } catch (e) { + console.error(e); + } + } + }; + + const reader = data.getReader(); + const decoder = new TextDecoder(); + const parser = createParser(onParse); + let done = false; + while (!done) { + const { value, done: doneReading } = await reader.read(); + done = doneReading; + const chunkValue = decoder.decode(value); + parser.feed(chunkValue); + } +} \ No newline at end of file diff --git a/frontend/nextjs/app/globals.css b/frontend/nextjs/app/globals.css new file mode 100644 index 0000000000000000000000000000000000000000..e1e860487d9531c6277e8d486ef4c5089cd636cc --- /dev/null +++ b/frontend/nextjs/app/globals.css @@ -0,0 +1,122 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +@keyframes gradientBG { + 0% {background-position: 0% 50%;} + 50% {background-position: 100% 50%;} + 100% {background-position: 0% 50%;} +} + +html { + scroll-behavior: smooth; +} + +textarea { + max-height: 300px; /* Set an appropriate max height */ + overflow-y: auto; /* Enable internal scrolling */ + /* transition: height 0.2s ease-in-out; */ +} + +.log-message { + word-wrap: break-word; /* For handling long URLs or text */ + overflow-wrap: break-word; /* For handling overflow in modern browsers */ + overflow-x: hidden; /* Hide horizontal overflow */ + word-break: break-word; /* Break long words if needed */ +} + +body { + font-family: 'Montserrat', sans-serif; + line-height: 1.6; + background-size: 200% 200%; + background-image: linear-gradient(170deg, #151A2D, #036f73, #151A2D); + /*animation: gradientBG 10s ease infinite;*/ +} + +.landing { + display: flex; + justify-content: center; + align-items: center; + height: 30vh; + text-align: center; + color: white; +} + +.landing h1 { + font-size: 3.5rem; + font-weight: 700; + margin-bottom: 2rem; +} + +@layer utilities { + .text-balance { + text-wrap: balance; + } + /* Hide scrollbar for Chrome, Safari and Opera */ + .no-scrollbar::-webkit-scrollbar { + display: none; + } + /* Hide scrollbar for IE, Edge and Firefox */ + .no-scrollbar { + -ms-overflow-style: none; /* IE and Edge */ + scrollbar-width: none; /* Firefox */ + } + .loader { + text-align: left; + display: flex; + gap: 3px; + } + + .loader span { + display: inline-block; + vertical-align: middle; + width: 7px; + height: 7px; + /* background: #4b4b4b; */ + background: white; + border-radius: 50%; + animation: loader 0.6s infinite alternate; + } + + .loader span:nth-of-type(2) { + animation-delay: 0.2s; + } + + .loader span:nth-of-type(3) { + animation-delay: 0.6s; + } + + @keyframes loader { + 0% { + opacity: 1; + transform: scale(0.6); + } + + 100% { + opacity: 0.3; + transform: scale(1); + } + } +} + +body { + margin: 0px !important; +} + +/* Add these styles for the scrollbar */ +.scrollbar-thin { + scrollbar-width: thin; +} + +.scrollbar-thumb-gray-600::-webkit-scrollbar-thumb { + background-color: #4B5563; + border-radius: 6px; +} + +.scrollbar-track-gray-300::-webkit-scrollbar-track { + background-color: #D1D5DB; +} + +.scrollbar-thin::-webkit-scrollbar { + width: 6px; +} diff --git a/frontend/nextjs/app/layout.tsx b/frontend/nextjs/app/layout.tsx new file mode 100644 index 0000000000000000000000000000000000000000..bf78a9aec460fb473e69b2de6dd91fb3a435730d --- /dev/null +++ b/frontend/nextjs/app/layout.tsx @@ -0,0 +1,56 @@ +import type { Metadata } from "next"; +import { Lexend } from "next/font/google"; +import PlausibleProvider from "next-plausible"; +import "./globals.css"; + +const inter = Lexend({ subsets: ["latin"] }); + +let title = "GPT Researcher"; 
+let description = + "LLM based autonomous agent that conducts local and web research on any topic and generates a comprehensive report with citations."; +let url = "https://github.com/assafelovic/gpt-researcher"; +let ogimage = "/favicon.ico"; +let sitename = "GPT Researcher"; + +export const metadata: Metadata = { + metadataBase: new URL(url), + title, + description, + icons: { + icon: "/favicon.ico", + }, + openGraph: { + images: [ogimage], + title, + description, + url: url, + siteName: sitename, + locale: "en_US", + type: "website", + }, + twitter: { + card: "summary_large_image", + images: [ogimage], + title, + description, + }, +}; + +export default function RootLayout({ + children, +}: Readonly<{ + children: React.ReactNode; +}>) { + return ( + + + + + + {children} + + + ); +} diff --git a/frontend/nextjs/app/page.tsx b/frontend/nextjs/app/page.tsx new file mode 100644 index 0000000000000000000000000000000000000000..23dee8f6638db82a4f60cb134e28b745e62b3b34 --- /dev/null +++ b/frontend/nextjs/app/page.tsx @@ -0,0 +1,317 @@ +"use client"; + +import { useRef, useState, useEffect, useCallback } from "react"; +import { useWebSocket } from '@/hooks/useWebSocket'; +import { startLanggraphResearch } from '../components/Langgraph/Langgraph'; +import findDifferences from '../helpers/findDifferences'; +import { Data, ChatBoxSettings, QuestionData } from '../types/data'; +import { preprocessOrderedData } from '../utils/dataProcessing'; +import { ResearchResults } from '../components/ResearchResults'; + +import Header from "@/components/Header"; +import Hero from "@/components/Hero"; +import Footer from "@/components/Footer"; +import InputArea from "@/components/ResearchBlocks/elements/InputArea"; +import HumanFeedback from "@/components/HumanFeedback"; +import LoadingDots from "@/components/LoadingDots"; + +export default function Home() { + const [promptValue, setPromptValue] = useState(""); + const [showResult, setShowResult] = useState(false); + const [answer, setAnswer] = useState(""); + const [loading, setLoading] = useState(false); + const [chatBoxSettings, setChatBoxSettings] = useState({ + report_source: 'web', + report_type: 'research_report', + tone: 'Objective' + }); + const [question, setQuestion] = useState(""); + const [orderedData, setOrderedData] = useState([]); + const [showHumanFeedback, setShowHumanFeedback] = useState(false); + const [questionForHuman, setQuestionForHuman] = useState(false); + const [allLogs, setAllLogs] = useState([]); + const chatContainerRef = useRef(null); + const [isStopped, setIsStopped] = useState(false); + const [showScrollButton, setShowScrollButton] = useState(false); + const mainContentRef = useRef(null); + + const { socket, initializeWebSocket } = useWebSocket( + setOrderedData, + setAnswer, + setLoading, + setShowHumanFeedback, + setQuestionForHuman + ); + + const handleFeedbackSubmit = (feedback: string | null) => { + if (socket) { + socket.send(JSON.stringify({ type: 'human_feedback', content: feedback })); + } + setShowHumanFeedback(false); + }; + + const handleChat = async (message: string) => { + if (socket) { + setShowResult(true); + setQuestion(message); + setLoading(true); + setPromptValue(""); + setAnswer(""); + + const questionData: QuestionData = { type: 'question', content: message }; + setOrderedData(prevOrder => [...prevOrder, questionData]); + + socket.send(`chat${JSON.stringify({ message })}`); + } + }; + + const handleDisplayResult = async (newQuestion: string) => { + setShowResult(true); + setLoading(true); + 
setQuestion(newQuestion); + setPromptValue(""); + setAnswer(""); + setOrderedData((prevOrder) => [...prevOrder, { type: 'question', content: newQuestion }]); + + const storedConfig = localStorage.getItem('apiVariables'); + const apiVariables = storedConfig ? JSON.parse(storedConfig) : {}; + const langgraphHostUrl = apiVariables.LANGGRAPH_HOST_URL; + + if (chatBoxSettings.report_type === 'multi_agents' && langgraphHostUrl) { + let { streamResponse, host, thread_id } = await startLanggraphResearch(newQuestion, chatBoxSettings.report_source, langgraphHostUrl); + const langsmithGuiLink = `https://smith.langchain.com/studio/thread/${thread_id}?baseUrl=${host}`; + setOrderedData((prevOrder) => [...prevOrder, { type: 'langgraphButton', link: langsmithGuiLink }]); + + let previousChunk = null; + for await (const chunk of streamResponse) { + if (chunk.data.report != null && chunk.data.report != "Full report content here") { + setOrderedData((prevOrder) => [...prevOrder, { ...chunk.data, output: chunk.data.report, type: 'report' }]); + setLoading(false); + } else if (previousChunk) { + const differences = findDifferences(previousChunk, chunk); + setOrderedData((prevOrder) => [...prevOrder, { type: 'differences', content: 'differences', output: JSON.stringify(differences) }]); + } + previousChunk = chunk; + } + } else { + initializeWebSocket(newQuestion, chatBoxSettings); + } + }; + + const reset = () => { + setShowResult(false); + setPromptValue(""); + setQuestion(""); + setAnswer(""); + }; + + const handleClickSuggestion = (value: string) => { + setPromptValue(value); + const element = document.getElementById('input-area'); + if (element) { + element.scrollIntoView({ behavior: 'smooth' }); + } + }; + + /** + * Handles stopping the current research + * - Closes WebSocket connection + * - Stops loading state + * - Marks research as stopped + * - Preserves current results + */ + const handleStopResearch = () => { + if (socket) { + socket.close(); + } + setLoading(false); + setIsStopped(true); + }; + + /** + * Handles starting a new research + * - Clears all previous research data and states + * - Resets UI to initial state + * - Closes any existing WebSocket connections + */ + const handleStartNewResearch = () => { + // Reset UI states + setShowResult(false); + setPromptValue(""); + setIsStopped(false); + + // Clear previous research data + setQuestion(""); + setAnswer(""); + setOrderedData([]); + setAllLogs([]); + + // Reset feedback states + setShowHumanFeedback(false); + setQuestionForHuman(false); + + // Clean up connections + if (socket) { + socket.close(); + } + setLoading(false); + }; + + /** + * Processes ordered data into logs for display + * Updates whenever orderedData changes + */ + useEffect(() => { + const groupedData = preprocessOrderedData(orderedData); + const statusReports = ["agent_generated", "starting_research", "planning_research"]; + + const newLogs = groupedData.reduce((acc: any[], data) => { + // Process accordion blocks (grouped data) + if (data.type === 'accordionBlock') { + const logs = data.items.map((item: any, subIndex: any) => ({ + header: item.content, + text: item.output, + metadata: item.metadata, + key: `${item.type}-${item.content}-${subIndex}`, + })); + return [...acc, ...logs]; + } + // Process status reports + else if (statusReports.includes(data.content)) { + return [...acc, { + header: data.content, + text: data.output, + metadata: data.metadata, + key: `${data.type}-${data.content}`, + }]; + } + return acc; + }, []); + + setAllLogs(newLogs); + }, 
[orderedData]); + + const handleScroll = useCallback(() => { + // Calculate if we're near bottom (within 100px) + const scrollPosition = window.scrollY + window.innerHeight; + const nearBottom = scrollPosition >= document.documentElement.scrollHeight - 100; + + // Show button if we're not near bottom and page is scrollable + const isPageScrollable = document.documentElement.scrollHeight > window.innerHeight; + setShowScrollButton(isPageScrollable && !nearBottom); + }, []); + + // Add ResizeObserver to watch for content changes + useEffect(() => { + const resizeObserver = new ResizeObserver(() => { + handleScroll(); + }); + + if (mainContentRef.current) { + resizeObserver.observe(mainContentRef.current); + } + + window.addEventListener('scroll', handleScroll); + window.addEventListener('resize', handleScroll); + + return () => { + if (mainContentRef.current) { + resizeObserver.unobserve(mainContentRef.current); + } + resizeObserver.disconnect(); + window.removeEventListener('scroll', handleScroll); + window.removeEventListener('resize', handleScroll); + }; + }, [handleScroll]); + + const scrollToBottom = () => { + window.scrollTo({ + top: document.documentElement.scrollHeight, + behavior: 'smooth' + }); + }; + + return ( + <> +
+ [page.tsx return markup not preserved in this patch view; recoverable structure:]
+ {!showResult && ( ... )}
+ {showResult && (
+   ...
+   {showHumanFeedback && ( ... )}
+   {loading ? ( ... ) : ( ... )}
+ )}
+ {showScrollButton && showResult && ( ... )}