diff --git a/.changesets/11238.md b/.changesets/11238.md
new file mode 100644
index 000000000000..397b61166158
--- /dev/null
+++ b/.changesets/11238.md
@@ -0,0 +1,14 @@
+- Adds background job scheduling and execution (#10906) by @cannikin
+
+This new package provides scheduling and processing of background jobs. We want everything needed to run a modern web application to be included in Redwood itself—you shouldn't need any third party integrations if you don't want to.
+
+Background jobs have been sorely missed, but the time has come! (If you do want to use a third party service we have had an [integration with Inngest](https://community.redwoodjs.com/t/ship-background-jobs-crons-webhooks-and-reliable-workflows-in-record-time-with-inngest-and-redwoodjs/4866) since May of 2023!)
+
+## Features
+
+- Named queues: you can schedule jobs in separate named queues and have a different number of workers monitoring each one, making it much easier to scale your background processing
+- Priority: give your jobs a priority from 1 (highest) to 100 (lowest). Workers will sort available jobs by priority, working the most important ones first.
+- Configurable delay: run your job as soon as possible (default), wait a number of seconds before running, or run at a specific time in the future
+- Auto-retries with backoff: if your job fails it will back off at the rate of attempts \*\* 4 for a default of 24 tries; with that schedule, the time between the last two attempts is a little over three days.
+- Run inline: instead of scheduling to run in the background, run immediately
+- Integrates with Redwood's [logger](https://docs.redwoodjs.com/docs/logger): use your existing one in api/src/lib/logger or create a new one just for job logging
diff --git a/.changesets/11337.md b/.changesets/11337.md
new file mode 100644
index 000000000000..04ac9939bd34
--- /dev/null
+++ b/.changesets/11337.md
@@ -0,0 +1,3 @@
+- refactor(prerender): build with esbuild and introduce conditional exports (#11337) by @Josh-Walker-GM
+
+This change restricts the available imports from the `@redwoodjs/prerender` package. You will also have to use modern `moduleResolution` settings in your tsconfig to resolve the imports correctly within TS.
diff --git a/.changesets/11338.md b/.changesets/11338.md
new file mode 100644
index 000000000000..1ccc0a64da75
--- /dev/null
+++ b/.changesets/11338.md
@@ -0,0 +1,3 @@
+- refactor(forms): Build with esbuild and add conditional exports (#11338) by @Josh-Walker-GM
+
+This change introduces restrictions on what can be imported from the `@redwoodjs/forms` package. You can no longer import from `@redwoodjs/forms/dist/...`. All imports should be available simply from `@redwoodjs/forms`.
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 266936d63bae..ea62722084cc 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -68,6 +68,22 @@ jobs:
     steps:
       - run: echo "Skipped"
+
+  formatting-check:
+    name: 📝 Check formatting (prettier)
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up job
+        uses: ./.github/actions/set-up-job
+        with:
+          set-up-yarn-cache: false
+          yarn-install-directory: .
+          build: false
+
+      - name: 📝 Check formatting (prettier)
+        run: yarn format:check
+
   build-lint-test:
     needs: check
@@ -90,9 +106,6 @@ jobs:
       - name: 🔎 Lint
         run: yarn lint

-      - name: 📝 Check formatting (prettier)
-        run: yarn format:check
-
       - name: 🥡 Check packaging and attw
         run: yarn check:package
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 7cc9205f31c7..001168545204 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -22,6 +22,7 @@
     "baremetal",
     "bazinga",
     "corepack",
+    "daemonized",
     "envinfo",
     "execa",
     "Fastify",
diff --git a/docs/docs/background-jobs.md b/docs/docs/background-jobs.md
new file mode 100644
index 000000000000..921eee45ba94
--- /dev/null
+++ b/docs/docs/background-jobs.md
@@ -0,0 +1,744 @@
+# Background Jobs
+
+No one likes waiting in line. This is especially true of your website: users don't want to wait for things to load that don't directly impact the task they're trying to accomplish. For example, sending a "welcome" email when a new user signs up. The process of sending the email could take as long as, or longer than, the sum total of everything else that happens during that request. Why make the user wait for it? As long as they eventually get the email, everything is good.
+
+## Concepts
+
+A typical create-user flow could look something like this:
+
+![jobs-before](/img/background-jobs/jobs-before.png)
+
+If we want the email to be sent asynchronously, we can shuttle that process off into a **background job**:
+
+![jobs-after](/img/background-jobs/jobs-after.png)
+
+The user's response is returned much more quickly, and the email is sent by another process, literally running in the background. All of the logic around sending the email is packaged up as a **job** and a **job worker** is responsible for executing it.
+
+Each job is completely self-contained and has everything it needs to perform its own task.
+
+### Overview
+
+There are three components to the background job system in Redwood:
+
+1. Scheduling
+2. Storage
+3. Execution
+
+**Scheduling** is the main interface to background jobs from within your application code. This is where you tell the system to run a job at some point in the future, whether that's:
+
+- as soon as possible
+- after a delay of some number of seconds
+- at a specific datetime in the future
+
+**Storage** is necessary so that your jobs are decoupled from your running application. The job system interfaces with storage via an **adapter**. With the included `PrismaAdapter`, jobs are stored in your database. This allows you to scale everything independently: the api server (which is scheduling jobs), the database (which is storing the jobs ready to be run), and the job workers (which are executing the jobs).
+
+**Execution** is handled by a **job worker**, which takes a job from storage, executes it, and then does something with the result, whether it was a success or failure.
+
+:::info Job execution time is never guaranteed
+
+When scheduling a job, you're really saying "this is the earliest possible time I want this job to run": based on what other jobs are in the queue, and how busy the workers are, they may not get a chance to execute this one particular job for an indeterminate amount of time.
+
+The only thing that's guaranteed is that a job won't run any _earlier_ than the time you specify.
+
+:::
+
+### Queues
+
+Jobs are organized by a named **queue**. This is simply a string and has no special significance, other than letting you group jobs. Why group them? So that you can potentially have workers with different configurations working on them. Let's say you send a lot of emails, and you find that among all your other jobs, emails are starting to be noticeably delayed when sending. You can start assigning those jobs to the "email" queue and create a new worker group that _only_ focuses on jobs in that queue so that they're sent in a more timely manner.
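+
+For example, two jobs could assign themselves to different queues. This sketch uses the `createJob()` function covered later in this doc, and the job names are just illustrations:
+
+```js
+// handled by workers watching the "email" queue
+export const SendWelcomeEmailJob = jobs.createJob({
+  queue: 'email',
+  perform: async (userId) => {
+    // send the email...
+  },
+})
+
+// handled by workers watching the "default" queue
+export const ResizeImageJob = jobs.createJob({
+  queue: 'default',
+  perform: async (imageId) => {
+    // resize the image...
+  },
+})
+```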
+
+Jobs are sorted by **priority** before being selected to be worked on. Lower numbers mean higher priority:
+
+![job-queues](/img/background-jobs/jobs-queues.png)
+
+You can also increase the number of workers in a group. If we bumped the group working on the "default" queue to 2 and started our new "email" group with 1 worker, once those workers started we would see them working on the following jobs:
+
+![job-workers](/img/background-jobs/jobs-workers.png)
+
+## Quick Start
+
+Start here if you want to get up and running with jobs as quickly as possible and worry about the details later.
+
+### Setup
+
+Run the setup command to get the jobs configuration file created and migrate the database with a new `BackgroundJob` table:
+
+```bash
+yarn rw setup jobs
+yarn rw prisma migrate dev
+```
+
+This created `api/src/lib/jobs.js` (or `.ts`) with a sensible default config. You can leave this as is for now.
+
+### Create a Job
+
+```bash
+yarn rw g job SampleJob
+```
+
+This created `api/src/jobs/SampleJob/SampleJob.js` along with test and scenario files. For now the job just outputs a message to the logs, but you'll fill out the `perform()` function to take any arguments you want and perform any work you want to do. Let's update the job to take a user's `id` and then just print that to the logs:
+
+```js
+import { jobs } from 'src/lib/jobs'
+
+export const SampleJob = jobs.createJob({
+  queue: 'default',
+  // highlight-start
+  perform: async (userId) => {
+    jobs.logger.info(`Received user id ${userId}`)
+  },
+  // highlight-end
+})
+```
+
+### Schedule a Job
+
+You'll most likely be scheduling work as the result of one of your service functions being executed. Let's say we want to schedule our `SampleJob` whenever a new user is created:
+
+```js title="api/src/services/users/users.js"
+import { db } from 'src/lib/db'
+// highlight-start
+import { later } from 'src/lib/jobs'
+import { SampleJob } from 'src/jobs/SampleJob'
+// highlight-end
+
+export const createUser = async ({ input }) => {
+  const user = await db.user.create({ data: input })
+  // highlight-next-line
+  await later(SampleJob, [user.id], { wait: 60 })
+  return user
+}
+```
+
+The first argument is the job itself; the second is an array of all the arguments your job should receive. The job itself defines them as normal, named arguments (like `userId`), but when you schedule you wrap them in an array (like `[user.id]`). The third argument is an optional object that provides a couple of options. In this case, the number of seconds to `wait` before this job will be run (60 seconds).
+
+### Executing a Job
+
+Start the worker process to find jobs in the DB and execute them:
+
+```bash
+yarn rw jobs work
+```
+
+This process will stay attached to the terminal and show you debug log output as it looks for jobs to run. Note that since we scheduled our job to wait 60 seconds before running, the runner will not find a job to work on right away (unless it's already been a minute since you scheduled it!).
+
+That's the basics of jobs! Keep reading to get a more detailed walkthrough, followed by the API docs listing all the various options.
+We'll wrap up with a discussion of using jobs in a production environment.
+
+## In-Depth Start
+
+Let's go into more depth in each of the parts of the job system.
+
+### Installation
+
+To get started with jobs, run the setup command:
+
+```bash
+yarn rw setup jobs
+```
+
+This will add a new model to your Prisma schema, and create a configuration file at `api/src/lib/jobs.js` (or `.ts` for a TypeScript project). You'll need to run migrations in order to actually create the model in your database:
+
+```bash
+yarn rw prisma migrate dev
+```
+
+This added the following model:
+
+```prisma
+model BackgroundJob {
+  id        Int       @id @default(autoincrement())
+  attempts  Int       @default(0)
+  handler   String
+  queue     String
+  priority  Int
+  runAt     DateTime?
+  lockedAt  DateTime?
+  lockedBy  String?
+  lastError String?
+  failedAt  DateTime?
+  createdAt DateTime  @default(now())
+  updatedAt DateTime  @updatedAt
+}
+```
+
+Let's look at the config file that was generated. Comments have been removed for brevity:
+
+```js
+import { PrismaAdapter, JobManager } from '@redwoodjs/jobs'
+
+import { db } from 'src/lib/db'
+import { logger } from 'src/lib/logger'
+
+export const jobs = new JobManager({
+  adapters: {
+    prisma: new PrismaAdapter({ db, logger }),
+  },
+  queues: ['default'],
+  logger,
+  workers: [
+    {
+      adapter: 'prisma',
+      logger,
+      queue: '*',
+      count: 1,
+      maxAttempts: 24,
+      maxRuntime: 14_400,
+      deleteFailedJobs: false,
+      sleepDelay: 5,
+    },
+  ],
+})
+
+export const later = jobs.createScheduler({
+  adapter: 'prisma',
+})
+```
+
+Two variables are exported: one is an instance of the `JobManager` called `jobs`, on which you'll call functions to create jobs and schedulers. The other is `later`, an instance of the `Scheduler`, which is responsible for getting your job into the storage system (out of the box this will be the database, thanks to the `PrismaAdapter`).
+
+We'll go into more detail on this file later (see [JobManager Config](#jobmanager-config)), but what's there now is fine to get started creating a job.
+
+### Creating New Jobs
+
+We have a generator that creates a job in `api/src/jobs`:
+
+```bash
+yarn rw g job SendWelcomeEmail
+```
+
+Jobs are defined as a plain object and given to the `createJob()` function (which is called on the `jobs` export in the config file above). An example `SendWelcomeEmailJob` may look something like:
+
+```js
+import { db } from 'src/lib/db'
+import { mailer } from 'src/lib/mailer'
+import { jobs } from 'src/lib/jobs'
+// the mail template used below; your path may differ depending on how your
+// mailer is set up
+import { WelcomeEmail } from 'src/mail/WelcomeEmail/WelcomeEmail'
+
+export const SendWelcomeEmailJob = jobs.createJob({
+  queue: 'default',
+  perform: async (userId) => {
+    const user = await db.user.findUnique({ where: { id: userId } })
+    await mailer.send(WelcomeEmail({ user }), {
+      to: user.email,
+      subject: `Welcome to the site!`,
+    })
+  },
+})
+```
+
+At a minimum, a job must contain the name of the `queue` the job should be saved to, and a function named `perform()` which contains the logic for your job. You can add additional properties to the object to support the task your job is performing, but `perform()` is what's invoked by the job worker that we'll see later.
+
+Note that `perform()` can take any argument(s) you want (or none at all), but it's a best practice to keep them as simple as possible. With the `PrismaAdapter` the arguments are stored in the database, so the list of arguments must be serializable to and from a string of JSON.
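+
+For example, a sketch of the difference (using the `later` scheduler covered in the next section):
+
+```js
+// Good: a plain id survives the JSON round-trip to the database unchanged
+await later(SendWelcomeEmailJob, [user.id])
+
+// Risky: `user` is a rich object whose values (like `Date`s) won't come back
+// from a JSON round-trip as the same types
+await later(SendWelcomeEmailJob, [user])
+```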
+
+:::info Keeping Arguments Simple
+
+Most jobs will probably act against data in your database, so it makes sense to have the arguments simply be the `id` of those database records. When the job executes it will look up the full database record and then proceed from there.
+
+If it's likely that the data in the database will change before your job is actually run, but you need the job to run with the original data, you may want to include the original values as arguments to your job. This way the job is sure to be working with those original values and not the potentially changed ones in the database.
+
+:::
+
+### Scheduling Jobs
+
+Remember the `later` export in the jobs config file:
+
+```js
+export const later = jobs.createScheduler({
+  adapter: 'prisma',
+})
+```
+
+You call this function, passing the job, job arguments, and an optional options object when you want to schedule a job. Let's see how we'd schedule our welcome email to go out when a new user is created:
+
+```js
+import { db } from 'src/lib/db'
+// highlight-start
+import { later } from 'src/lib/jobs'
+import { SendWelcomeEmailJob } from 'src/jobs/SendWelcomeEmailJob'
+// highlight-end
+
+export const createUser = async ({ input }) => {
+  const user = await db.user.create({ data: input })
+  // highlight-next-line
+  await later(SendWelcomeEmailJob, [user.id])
+  return user
+}
+```
+
+By default the job will run as soon as possible. If you want to wait five minutes before sending the email you can set `wait` to a number of seconds:
+
+```js
+later(SendWelcomeEmailJob, [user.id], { wait: 300 })
+```
+
+Or run it at a specific datetime:
+
+```js
+later(MillenniumAnnouncementJob, [user.id], {
+  waitUntil: new Date(3000, 0, 1, 0, 0, 0),
+})
+```
+
+:::info Running a Job Immediately
+
+As noted in the [Concepts](#concepts) section, a job is never _guaranteed_ to run at an exact time. The worker could be busy working on other jobs and can't get to yours just yet.
+
+If you absolutely, positively need your job to run right _now_ (with the knowledge that the user will be waiting for it to complete) you can call your job's `perform` function directly in your code:
+
+```js
+await SendWelcomeEmailJob.perform(user.id)
+```
+
+:::
+
+If we were to query the `BackgroundJob` table after the job has been scheduled, we'd see a new row. We can use the Redwood Console to query the table from the command line:
+
+```js
+% yarn rw console
+> db.backgroundJob.findMany()
+[
+  {
+    id: 1,
+    attempts: 0,
+    handler: '{"name":"SendWelcomeEmailJob","path":"SendWelcomeEmailJob/SendWelcomeEmailJob","args":[335]}',
+    queue: 'default',
+    priority: 50,
+    runAt: 2024-07-12T22:27:51.085Z,
+    lockedAt: null,
+    lockedBy: null,
+    lastError: null,
+    failedAt: null,
+    createdAt: 2024-07-12T22:27:51.125Z,
+    updatedAt: 2024-07-12T22:27:51.125Z
+  }
+]
+```
+
+:::info
+
+Because we're using the `PrismaAdapter` here all jobs are stored in the database, but if you were using a different storage mechanism via a different adapter you would have to query those in a manner specific to that adapter's backend.
+
+:::
+
+The `handler` column contains the name of the job, the file path to find it, and the arguments its `perform()` function will receive. Where did the `name` and `path` come from? We have a babel plugin that adds them to your job when they are built.
+
+:::warning Jobs Must Be Built
+
+Jobs are run from the `api/dist` directory, which will exist only after running `yarn rw build api` or `yarn rw dev`. If you are working on a job in development, you're probably running `yarn rw dev` anyway.
+But just be aware that if the dev server is _not_ running then any changes to your job will not be reflected unless you run `yarn rw build api` (or start the dev server) to compile your job into `api/dist`.
+
+:::
+
+### Executing Jobs
+
+In development you can start a job worker via the **job runner** from the command line:
+
+```bash
+yarn rw jobs work
+```
+
+The runner is a sort of overseer that doesn't do any work itself, but spawns workers to actually execute the jobs. When starting in `work` mode your `workers` config will be used to start the workers and they will stay attached to the terminal, updating you on the status of what they're doing:
+
+![image](/img/background-jobs/jobs-terminal.png)
+
+Each worker checks the `BackgroundJob` table every few seconds for a new job and, if it finds one, locks it so that no other workers can have it, then calls your `perform()` function, passing it the arguments you gave when you scheduled it.
+
+If the job succeeds then by default it's removed from the database (using the `PrismaAdapter`; other adapters' behavior may vary). If the job fails, the job is un-locked in the database, the `runAt` is set to an incremental backoff time in the future, and `lastError` is updated with the error that occurred. The job will be picked up again once the `runAt` time has passed and it'll try again.
+
+To stop the runner (and the workers it started), press `Ctrl-C` (or send `SIGINT`). The workers will gracefully shut down, waiting for their work to complete before exiting. If you don't want to wait, hit `Ctrl-C` again (or send `SIGTERM`).
+
+There are a couple of additional modes that `rw jobs` can run in:
+
+```bash
+yarn rw jobs workoff
+```
+
+This mode will execute all jobs that are eligible to run, then stop itself.
+
+```bash
+yarn rw jobs start
+```
+
+Starts the workers and then detaches them to run forever. Use `yarn rw jobs stop` to stop them, or `yarn rw jobs restart` to pick up any code changes to your jobs.
+
+### Everything Else
+
+The rest of this doc describes more advanced usage, like:
+
+- Assigning jobs to named **queues**
+- Setting a **priority** so that some jobs always run before others
+- Using different adapters and loggers on a per-job basis
+- Starting more than one worker
+- Having some workers focus on only certain queues
+- Configuring individual workers to use different adapters
+- Manually starting workers without the job runner monitoring them
+- And more!
+
+## Configuration
+
+There are a bunch of ways to customize your jobs and the workers.
+
+### JobManager Config
+
+Let's take a closer look at the `jobs` export in `api/src/lib/jobs.js`:
+
+```js
+export const jobs = new JobManager({
+  adapters: {
+    prisma: new PrismaAdapter({ db, logger }),
+  },
+  queues: ['default'],
+  logger,
+  workers: [
+    {
+      adapter: 'prisma',
+      logger,
+      queue: '*',
+      count: 1,
+      maxAttempts: 24,
+      maxRuntime: 14_400,
+      deleteFailedJobs: false,
+      sleepDelay: 5,
+    },
+  ],
+})
+```
+
+The object passed here contains all of the configuration for the Background Job system. Let's take a quick look at the four top-level properties and then we'll get into more details in the subsections to follow.
+
+#### `adapters`
+
+This is the list of adapters that are available to handle storing and retrieving your jobs to and from the storage system. You could list more than one adapter here and then have multiple schedulers. Most folks will probably stick with a single one.
+
+#### `queues`
+
+An array of available queue names that jobs can be placed in. By default, a single queue named "default" is listed here, and it will also be the default queue for generated jobs. To denote the named queue that a worker will look at, there is a matching `queue` property on the `workers` config below.
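+
+For example, a sketch of declaring a second named queue (the `email` name is just an illustration):
+
+```js
+export const jobs = new JobManager({
+  // ...the rest of your config stays the same
+  queues: ['default', 'email'],
+})
+```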
+
+#### `logger`
+
+The logger object used for all internal logging of the job system itself. It will fall back to `console` if you don't set it.
+
+#### `workers`
+
+This is an array of objects, each defining a "group" of workers. When will you need more than one group? If you need workers to work on different queues, or use different adapters. Read more about this in the [Job Workers](#job-workers) section.
+
+### Adapter Config
+
+Adapters are added as key/value pairs to the `adapters` object given to the `JobManager` upon initialization. The key of the property (like `prisma` in the example below) is the name you'll use in your scheduler when you tell it which adapter to use to schedule your jobs. Adapters accept an object of options when they are initialized.
+
+#### PrismaAdapter
+
+```js
+export const jobs = new JobManager({
+  adapters: {
+    // highlight-next-line
+    prisma: new PrismaAdapter({ db, model: 'BackgroundJob', logger }),
+  },
+  // remaining config...
+})
+```
+
+- `db`: **[required]** an instance of `PrismaClient` that the adapter will use to store, find and update the status of jobs. In most cases this will be the `db` variable exported from `api/src/lib/db.js`. This must be set in order for the adapter to be initialized!
+- `model`: the name of the model that was created to store jobs. This defaults to `BackgroundJob`.
+- `logger`: events that occur within the adapter will be logged using this. This defaults to `console`, but the `logger` exported from `api/src/lib/logger` works great.
+
+### Scheduler Config
+
+When you create an instance of the scheduler you can pass it a couple of options:
+
+```js
+export const later = jobs.createScheduler({
+  adapter: 'prisma',
+})
+```
+
+- `adapter` : **[required]** the name of the adapter this scheduler will use to schedule jobs. Must be one of the keys that you gave to the `adapters` option on the `JobManager` itself.
+- `logger` : the logger to use for this instance of the scheduler. If not provided, defaults to the `logger` set on the `JobManager`.
+
+#### Scheduling Options
+
+When using the scheduler to schedule a job you can pass options in an optional third argument:
+
+```js
+later(SampleJob, [user.id], { wait: 300 })
+```
+
+- `wait`: number of seconds to wait before the job will run
+- `waitUntil`: a specific `Date` in the future to run at
+
+If you don't pass any options then the job defaults to running as soon as possible, i.e. its `runAt` is set to `new Date()`.
+
+### Job Config
+
+There are two configuration options you can define in the object that describes your job:
+
+```js
+import { jobs } from 'src/lib/jobs'
+
+export const SendWelcomeEmailJob = jobs.createJob({
+  // highlight-start
+  queue: 'email',
+  priority: 1,
+  // highlight-end
+  perform: async (userId) => {
+    // job details...
+  },
+})
+```
+
+- `queue` : **[required]** the name of the queue that this job will be placed in. Must be one of the strings you assigned to the `queues` array when you set up the `JobManager`.
+- `priority` : within a queue you can have jobs that are more or less important. The workers will pull jobs off the queue with a higher priority before working on ones with a lower priority. A lower number is _higher_ in priority than a higher number: the workers will work on a job with a priority of `1` before they work on one with a priority of `100`. If you don't override it here, the default priority is `50`.
+
+### Worker Config
+
+This is the largest section of the `JobManager` config object. This options array tells the workers how to behave when looking for and executing jobs.
+
+```js
+export const jobs = new JobManager({
+  // .. more config
+  workers: [
+    {
+      adapter: 'prisma',
+      logger,
+      queue: '*',
+      count: 1,
+      maxAttempts: 24,
+      maxRuntime: 14_400,
+      deleteSuccessfulJobs: true,
+      deleteFailedJobs: false,
+      sleepDelay: 5,
+    },
+  ],
+})
+```
+
+This is an array of objects. Each object represents the config for a single "group" of workers. By default, there is only one worker group. It uses the `PrismaAdapter` and will look for jobs in all queues. If you want to start fine-tuning your workers by working with different adapters, or only working on some named queues, you can add additional members to this array, each with a unique set of options.
+
+- `adapter` : **[required]** the name of the adapter this worker group will use. Must be one of the keys that you gave to the `adapters` option on the `JobManager` itself.
+- `logger` : the logger to use when working on jobs. If not provided, defaults to the `logger` set on the `JobManager`. You can use this logger in the `perform()` function of your job by accessing `jobs.logger`
+- `queue` : **[required]** the named queue(s) that this worker group will watch for jobs. There is a reserved `'*'` value you can use which means "all queues." This can be an array of queues as well: `['default', 'email']` for example.
+- `count` : **[required]** the number of workers to start with this config.
+- `maxAttempts`: the maximum number of times to retry a job before giving up. A job that throws an error will be set to retry in the future with an exponential backoff in time (in seconds) equal to the number of previous attempts \*\* 4. After this number of attempts, a job is considered "failed" and will not be re-attempted. Default: `24`.
+- `maxRuntime` : the maximum amount of time, in seconds, to try running a job before another worker will pick it up and try again. It's up to you to make sure your job doesn't run for longer than this amount of time! Default: `14_400` (4 hours).
+- `deleteFailedJobs` : when a job has failed (the maximum number of retries has occurred) you can keep the job in the database, or delete it. Default: `false`.
+- `deleteSuccessfulJobs` : when a job has succeeded, you can keep the job in the database, or delete it. It's generally assumed that your jobs _will_ succeed so it usually makes sense to clear them out and keep the queue lean. Default: `true`.
+- `sleepDelay` : the amount of time, in seconds, to wait between checks of the queue for another job to run. Too low and you'll be thrashing your storage system looking for jobs, too high and you start to have a long delay before any job is run. Default: `5`.
+
+See the next section for advanced usage examples, like multiple worker groups.
+
+## Job Workers
+
+A job worker actually executes your jobs. The workers will ask the adapter to find a job to work on. The adapter will mark the job as locked (the process name and a timestamp are set on the job) and then the worker will call `perform()` on your job, passing in any args that were given when you scheduled it. The behavior of what happens when the job succeeds or fails depends on the config options you set in the `JobManager`. By default, successful jobs are removed from storage and failed jobs are kept around so you can diagnose what happened.
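+
+Conceptually, each worker's main loop looks something like the following simplified sketch. This is for illustration only, not the actual implementation: `loadJob` is a hypothetical helper, and the free variables (`adapter`, `deleteSuccessfulJobs`, `sleepDelay`) stand in for the worker group's config:
+
+```js
+while (true) {
+  // ask the adapter for the next job in our queue(s), sorted by priority,
+  // locking it so that no other worker picks it up
+  const job = await adapter.find()
+
+  if (job) {
+    try {
+      // load the job module from its stored `path` and run it
+      const { perform } = await loadJob(job.path, job.name)
+      await perform(...job.args)
+      // delete (or keep) the job based on the `deleteSuccessfulJobs` setting
+      await adapter.success(job, deleteSuccessfulJobs)
+    } catch (error) {
+      // unlock the job, increment `attempts`, set `runAt` to roughly
+      // now + attempts ** 4 seconds, and record the error in `lastError`
+      await adapter.error(job, error)
+    }
+  } else {
+    // nothing eligible to run: sleep `sleepDelay` seconds and check again
+    await new Promise((resolve) => setTimeout(resolve, sleepDelay * 1000))
+  }
+}
+```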
+
+The runner has several modes it can start in depending on how you want it to behave.
+
+### Dev Modes
+
+These modes are ideal when you're creating a job and want to be sure it runs correctly while developing. You could also use them in production if you wanted (maybe a job is failing and you want to watch verbose logs and see what's happening).
+
+```bash
+yarn rw jobs work
+```
+
+This process will stay attached to the console and continually look for new jobs and execute them as they are found. The log level is set to `debug` by default so you'll see everything. Pressing `Ctrl-C` to cancel the process (sending `SIGINT`) will start a graceful shutdown: the workers will complete any work they're in the middle of before exiting. To cancel immediately, hit `Ctrl-C` again (or send `SIGTERM`) and they'll stop in the middle of what they're doing. Note that this could leave locked jobs in the database, but they will be picked back up again if a new worker starts with the same process name as the one that locked the job. They'll also be picked up automatically after `maxRuntime` has expired, even if they are still locked.
+
+:::caution Long running jobs
+
+It's currently up to you to make sure your job completes before your `maxRuntime` limit is reached! Node.js Promises are not truly cancelable: you can reject early, but any Promises that were started _inside_ will continue running unless they are also rejected early, and so on recursively.
+
+The only way to guarantee a job will completely stop no matter what is for your job to spawn an actual OS-level process with a timeout that kills it after a certain amount of time. We may add this functionality natively to Jobs in the near future: let us know if you'd benefit from this being built in!
+
+:::
+
+To work on whatever outstanding jobs there are and then automatically exit, use the `workoff` mode:
+
+```bash
+yarn rw jobs workoff
+```
+
+As soon as there are no more jobs to be executed (either the store is empty, or they are scheduled in the future) the process will automatically exit.
+
+### Clearing the Job Queue
+
+You can remove all jobs from storage with:
+
+```bash
+yarn rw jobs clear
+```
+
+### Production Modes
+
+In production you'll want your job workers running forever in the background. For that, use the `start` mode:
+
+```bash
+yarn rw jobs start
+```
+
+That will start a number of workers determined by the `workers` config on the `JobManager` and then detach them from the console. If you care about the output of those workers then you'll want to configure a logger that writes to the filesystem or sends logs to a third-party log aggregator.
+
+To stop the workers:
+
+```bash
+yarn rw jobs stop
+```
+
+Or to restart any that are already running:
+
+```bash
+yarn rw jobs restart
+```
+
+### Multiple Workers
+
+With the default configuration options generated with the `yarn rw setup jobs` command you'll have one worker group. If you simply want more workers that use the same `adapter` and `queue` settings, increase the `count`:
+
+```js
+export const jobs = new JobManager({
+  adapters: {
+    prisma: new PrismaAdapter({ db, logger }),
+  },
+  queues: ['default'],
+  logger,
+  workers: [
+    {
+      adapter: 'prisma',
+      logger,
+      queue: '*',
+      // highlight-next-line
+      count: 5,
+      maxAttempts: 24,
+      maxRuntime: 14_400,
+      deleteFailedJobs: false,
+      sleepDelay: 5,
+    },
+  ],
+})
+```
+
+Now you have 5 workers.
+If you want to have separate workers working on separate queues, create another worker config object with a different queue name:
+
+```js
+export const jobs = new JobManager({
+  adapters: {
+    prisma: new PrismaAdapter({ db, logger }),
+  },
+  queues: ['default', 'email'],
+  logger,
+  workers: [
+    {
+      adapter: 'prisma',
+      logger,
+      // highlight-start
+      queue: 'default',
+      // highlight-end
+      count: 2,
+      maxAttempts: 24,
+      maxRuntime: 14_400,
+      deleteFailedJobs: false,
+      sleepDelay: 5,
+    },
+    {
+      adapter: 'prisma',
+      logger,
+      // highlight-start
+      queue: 'email',
+      count: 1,
+      maxAttempts: 1,
+      maxRuntime: 30,
+      deleteFailedJobs: true,
+      // highlight-end
+      sleepDelay: 5,
+    },
+  ],
+})
+```
+
+Here, we have 2 workers working on the "default" queue and 1 worker looking at the "email" queue (which will only try a job once, wait 30 seconds for it to finish, and delete the job if it fails). You can also have different worker groups using different adapters. For example, you may store and work on some jobs in your database using the `PrismaAdapter` and some jobs/workers using a `RedisAdapter`.
+
+:::info
+
+We don't currently provide a `RedisAdapter` but plan to add one soon! You'll want to create additional schedulers to use any other adapters as well:
+
+```js
+export const prismaLater = jobs.createScheduler({
+  adapter: 'prisma',
+})
+
+export const redisLater = jobs.createScheduler({
+  adapter: 'redis',
+})
+```
+
+:::
+
+## Deployment
+
+For many use cases you may simply be able to rely on the job runner to start your job workers, which will run forever:
+
+```bash
+yarn rw jobs start
+```
+
+When you deploy new code you'll want to restart your runners to make sure they get the latest source files:
+
+```bash
+yarn rw jobs restart
+```
+
+Using this utility, however, gives you nothing to monitor that your job workers are still running: the runner starts the required number of workers, detaches them, and then exits itself. Node processes are pretty robust, but by no means are they guaranteed to run forever with no problems. You could mistakenly release a bad job that has an infinite loop, or even a random gamma ray striking the RAM of the server could cause a panic that shuts the process down.
+
+For maximum reliability you should take a look at the [Advanced Job Workers](#advanced-job-workers) section and manually start your workers this way, with a process monitor like [pm2](https://pm2.keymetrics.io/) or [nodemon](https://github.com/remy/nodemon) to watch and restart the workers if something unexpected happens (a hypothetical example follows below).
+
+:::info
+
+Of course if you have a process monitor system watching your workers you'll want to use the process monitor's version of the `restart` command each time you deploy!
+
+:::
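+
+For example, here is a hypothetical [pm2](https://pm2.keymetrics.io/) ecosystem file that keeps two workers alive using the `yarn rw-jobs-worker` command described in the next section. The name, path and count are illustrative, not a canonical setup:
+
+```js
+// ecosystem.config.js (hypothetical; adjust to your own app and pm2 setup)
+module.exports = {
+  apps: [0, 1].map((id) => ({
+    name: `rw-job-worker-${id}`,
+    cwd: '/path/to/your/app',
+    script: 'yarn',
+    args: `rw-jobs-worker --index=0 --id=${id}`,
+    // yarn is a binary, not a JS file, so tell pm2 not to run it with node
+    interpreter: 'none',
+    autorestart: true,
+  })),
+}
+```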
+
+## Advanced Job Workers
+
+As noted above, although the workers are started and detached using the `yarn rw jobs start` command, there is nothing to monitor those workers to make sure they keep running. To do that, you'll want to start the workers yourself (or have your process monitor start them) using command line flags.
+
+You can do this with the `yarn rw-jobs-worker` command. The flags passed to the script tell it which worker group config to use to start itself, and which `id` to give this worker (if you're running more than one). To start a single worker, using the first `workers` config object, you would run:
+
+```bash
+yarn rw-jobs-worker --index=0 --id=0
+```
+
+:::info
+
+The job runner started with `yarn rw jobs start` runs this same command behind the scenes for you, keeping it attached or detached depending on whether you start in `work` or `start` mode!
+
+:::
+
+### Flags
+
+- `--index` : a number that represents the index of the `workers` config array you passed to the `JobManager`. Setting this to `0`, for example, uses the first object in the array to set all config options for the worker.
+- `--id` : a number identifier that's set as part of the process name. Starting a worker with `--id=0` and then inspecting your process list will show one running named `rw-job-worker.queue-name.0`. Using `yarn rw-jobs-worker` only ever starts a single instance, so if your config had a `count` of `2` you'd need to run the command twice, once with `--id=0` and a second time with `--id=1`.
+- `--workoff` : a boolean that will execute all currently available jobs and then cause the worker to exit. Defaults to `false`.
+- `--clear` : a boolean that starts a worker to remove all jobs from all queues. Defaults to `false`.
+
+Your process monitor can now restart the workers automatically if they crash, since the monitor is watching the worker script itself and not the wrapping job runner.
+
+### What Happens if a Worker Crashes?
+
+If a worker crashes because of circumstances outside of your control, the job it was running will remain locked in the storage system: the worker couldn't finish its work and clean up after itself. When this happens, the job will be picked up again immediately if a new worker starts with the same process title; otherwise, once `maxRuntime` has passed it's eligible for any worker to pick up and re-lock.
+
+## Creating Your Own Adapter
+
+We'd love the community to contribute adapters for Redwood Jobs! Take a look at the source for `BaseAdapter` for what's absolutely required, and then the source for `PrismaAdapter` to see a concrete implementation.
+
+The general gist of the required functions:
+
+- `find()` should find a job to be run, lock it and return it (minimum return of an object containing `id`, `name`, `path`, `args` and `attempts` properties)
+- `schedule()` accepts `name`, `path`, `args`, `runAt`, `queue` and `priority` and should store the job
+- `success()` accepts the same job object returned from `find()` and a `deleteJob` boolean for whether the job should be deleted upon success.
+- `error()` accepts the same job object returned from `find()` and an error instance. Does whatever failure means to you (like unlocking the job and rescheduling a time for it to run again in the future)
+- `failure()` is called when the job has reached `maxAttempts`. Accepts the job object and a `deleteJob` boolean that says whether the job should be deleted.
+- `clear()` removes all jobs from the queue (mostly used in development).
+
+## The Future
+
+There's still more to add to background jobs! Our current TODO list:
+
+- More adapters: Redis, SQS, RabbitMQ...
+- RW Studio integration: monitor the state of your outstanding jobs
+- Baremetal integration: if jobs are enabled, monitor the workers with pm2
+- Recurring jobs
+- Lifecycle hooks: `beforePerform()`, `afterPerform()`, `afterSuccess()`, `afterFailure()`
diff --git a/docs/docs/services.md b/docs/docs/services.md
index 0b6c49ef88e0..1d625b25c681 100644
--- a/docs/docs/services.md
+++ b/docs/docs/services.md
@@ -1036,14 +1036,14 @@ Use this function when you want to cache some data, optionally including a numbe

 ```js
 // cache forever
-const post = ({ id }) => {
+const posts = () => {
   return cache(`posts`, () => {
     return db.post.findMany()
   })
 }

 // cache for 1 hour
-const post = ({ id }) => {
+const posts = () => {
   return cache(
     `posts`,
     () => {
@@ -1059,7 +1059,7 @@ Note that a key can be a string or an array:

 ```js
 const post = ({ id }) => {
   return cache(`posts-${id}-${updatedAt.getTime()}`, () => {
-    return db.post.findMany()
+    return db.post.findUnique({ where: { id } })
   })
 }

@@ -1067,7 +1067,7 @@ const post = ({ id }) => {

 const post = ({ id }) => {
   return cache(['posts', id, updatedAt.getTime()], () => {
-    return db.post.findMany()
+    return db.post.findUnique({ where: { id } })
   })
 }
 ```
diff --git a/docs/docs/tutorial/chapter7/api-side-currentuser.md b/docs/docs/tutorial/chapter7/api-side-currentuser.md
index a1b7b62abcf2..567caaa655a3 100644
--- a/docs/docs/tutorial/chapter7/api-side-currentuser.md
+++ b/docs/docs/tutorial/chapter7/api-side-currentuser.md
@@ -681,7 +681,7 @@ export const updatePost = async ({ id, input }) => {
   } else {
     throw new ForbiddenError("You don't have access to this post")
   }
-// highlight-end
+  // highlight-end
 }
 ```
diff --git a/docs/docs/typescript/introduction.md b/docs/docs/typescript/introduction.md
index 7cfe1732f8ae..7ee4ac51d44f 100644
--- a/docs/docs/typescript/introduction.md
+++ b/docs/docs/typescript/introduction.md
@@ -132,3 +132,22 @@ import { CustomModal } from '@adminUI/CustomModal'
 1. **Improved code readability**, by abstracting complex directory hierarchies, and having meaningful names for your imports.
 1. **Code maintainability**, aliases allow you to decouple your code from the file structure and more easily move files around, as they are not tied to the longer path.
 1. **Reduce boilerplate**, no more `../../src/components/modules/admin/common/ui/` 😮‍💨
+
+When you start writing tests for components that contain alias paths, you will need to add the following to your Jest configuration in `jest.config.js`:
+
+```js
+const config = {
+  rootDir: '../',
+  preset: '@redwoodjs/testing/config/jest/web',
+  moduleNameMapper: {
+    '^@adminUI/(.*)$':
+      '<rootDir>/web/src/components/modules/admin/common/ui/$1',
+  },
+}
+
+module.exports = config
+```
+
+:::info
+There are 3 `jest.config.js` files within a Redwood project. There's one inside the `web` directory, one inside the `api` directory, and one at the root of the project. Since the alias I created is used within the `web` directory, I added the `moduleNameMapper` to the `jest.config.js` file within the `web` directory.
+::: diff --git a/docs/sidebars.js b/docs/sidebars.js index 89fe8a5dec39..428fd7a3a761 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -97,6 +97,7 @@ module.exports = { { type: 'doc', id: 'auth/supertokens' }, ], }, + 'background-jobs', 'builds', 'cells', 'cli-commands', diff --git a/docs/static/img/background-jobs/jobs-after.png b/docs/static/img/background-jobs/jobs-after.png new file mode 100644 index 000000000000..67eabc6267c1 Binary files /dev/null and b/docs/static/img/background-jobs/jobs-after.png differ diff --git a/docs/static/img/background-jobs/jobs-before.png b/docs/static/img/background-jobs/jobs-before.png new file mode 100644 index 000000000000..84f138fa9181 Binary files /dev/null and b/docs/static/img/background-jobs/jobs-before.png differ diff --git a/docs/static/img/background-jobs/jobs-db.png b/docs/static/img/background-jobs/jobs-db.png new file mode 100644 index 000000000000..1fd900de565f Binary files /dev/null and b/docs/static/img/background-jobs/jobs-db.png differ diff --git a/docs/static/img/background-jobs/jobs-queues.png b/docs/static/img/background-jobs/jobs-queues.png new file mode 100644 index 000000000000..36693bc6e68d Binary files /dev/null and b/docs/static/img/background-jobs/jobs-queues.png differ diff --git a/docs/static/img/background-jobs/jobs-terminal.png b/docs/static/img/background-jobs/jobs-terminal.png new file mode 100644 index 000000000000..728423533e7d Binary files /dev/null and b/docs/static/img/background-jobs/jobs-terminal.png differ diff --git a/docs/static/img/background-jobs/jobs-workers.png b/docs/static/img/background-jobs/jobs-workers.png new file mode 100644 index 000000000000..5b6b1fe167e9 Binary files /dev/null and b/docs/static/img/background-jobs/jobs-workers.png differ diff --git a/docs/versioned_docs/version-3.x/services.md b/docs/versioned_docs/version-3.x/services.md index ff5f7335cb67..25440c353ef1 100644 --- a/docs/versioned_docs/version-3.x/services.md +++ b/docs/versioned_docs/version-3.x/services.md @@ -316,7 +316,6 @@ validate(input.usPhone, 'US Phone Number', { * `message`: a custom error message if validation fails - ```jsx validate(input.usPhone, { format: { @@ -712,6 +711,7 @@ const createUser = (input) => { ``` You can provide the PrismaClient to be used for the transaction and callback. + ```jsx import { db } from 'src/lib/db' @@ -770,7 +770,7 @@ Why use a cache? If you have an expensive or time-consuming process in your serv :::info What about GraphQL caching? -You could also cache data at the [GraphQL layer](https://community.redwoodjs.com/t/guide-power-of-graphql-caching/2624) which has some of the same benefits. Using Envelop plugins you can add a response cache _after_ your services (resolver functions in the context of GraphQL) run - with a global configuration. +You could also cache data at the [GraphQL layer](https://community.redwoodjs.com/t/guide-power-of-graphql-caching/2624) which has some of the same benefits. Using Envelop plugins you can add a response cache *after* your services (resolver functions in the context of GraphQL) run - with a global configuration. However, by placing the cache one level "lower," at the service level, you get the benefit of caching even when one service calls another internally, or when a service is called via another serverless function, and finer grained control of what you're caching. 
@@ -782,7 +782,7 @@ In our example above you could cache the GraphQL query for the most popular prod As of this writing, Redwood ships with clients for the two most popular cache backends: [Memcached](https://memcached.org/) and [Redis](https://redis.io/). Service caching wraps each of these in an adapter, which makes it easy to add more clients in the future. If you're interested in adding an adapter for your favorite cache client, [open a issue](https://github.com/redwoodjs/redwood/issues) and tell us about it! Instructions for getting started with the code are [below](#creating-your-own-client). -::: info +:::info If you need to access functionality in your cache client that the `cache()` and `cacheFindMany()` functions do not handle, you can always get access to the underlying raw client library and use it however you want: @@ -795,7 +795,12 @@ export const updatePost = async ({ id, input }) => { where: { id }, }) // highlight-next-line - await cacheClient.MSET(`post-${id}`, JSON.stringify(post), `blogpost-${id}`, JSON.stringify(post)) + await cacheClient.MSET( + `post-${id}`, + JSON.stringify(post), + `blogpost-${id}`, + JSON.stringify(post) + ) return post } @@ -830,7 +835,7 @@ What if we add a "type" into the cache key, so we know what type of thing we're One solution would be to put all of the data that we care about changing into the key, like: `product-41442-${description}`. The problem here is that keys can only be so long (in Memcached it's 250 bytes). Another option could be to hash the entire product object and use that as the key (this can encompass the `product` part of the key as well as the ID itself, since *any* data in the object being different will result in a new hash): ```js -import { md5 } from "blueimp-md5" +import { md5 } from 'blueimp-md5' cache(md5(JSON.stringify(product)), () => { // ... @@ -850,6 +855,7 @@ cache(product, () => { // ... }) ``` + ::: One drawback to this key is in potentially responding to *too many* data changes, even ones we don't care about caching. Imagine that a product has a `views` field that tracks how many times it has been viewed in the browser. This number will be changing all the time, but if we don't display that count to the user then we're constantly re-creating the cache for the product even though no data the user will see is changing. There's no way to tell Prisma "set the `updatedAt` when the record changes, but not if the `views` column changes." This cache key is too variable. One solution would be to move the `views` column to another table with a `productId` pointing back to this record. Now the `product` is back to just containing data we care about caching. @@ -873,7 +879,7 @@ How does that last one work? We get a list of all the keys and then apply a hash ```javascript const product = db.product.findUnique({ where: { id } }) const columns = Object.keys(product) // ['id', 'name', 'sku', ...] -const hash = md5(columns.join(',')) // "e4d7f1b4ed2e42d15898f4b27b019da4" +const hash = md5(columns.join(',')) // "e4d7f1b4ed2e42d15898f4b27b019da4" cache(`v1-product-${hash}-${id}-${updatedAt}`, () => { // ... @@ -889,9 +895,13 @@ Note that this has the side effect of having to select at least one record from You can skirt these issues about what data is changing and what to include or not include in the key by just setting an expiration time on this cache entry. You may decide that if a change is made to a product, it's okay if users don't see the change for, say, an hour. 
In this case just set the expiration time to 3600 seconds and it will automatically be re-built, whether something changed in the record or not: ```js -cache(`product-${id}`, () => { - // ... -}, { expires: 3600 }) +cache( + `product-${id}`, + () => { + // ... + }, + { expires: 3600 } +) ``` This leads to your product cache being rebuilt every hour, even though you haven't made any changes that are of consequence to the user. But that may be we worth the tradeoff versus rebuilding the cache when *no* useful data has changed (like the `views` column being updated). @@ -981,7 +991,7 @@ The second usage of the logger argument: ```js export const { cache, cacheFindMany } = createCache(client, { logger, - timeout: 500 + timeout: 500, }) ``` @@ -1002,17 +1012,21 @@ Use this function when you want to cache some data, optionally including a numbe ```js // cache forever -const post = ({ id }) => { +const posts = () => { return cache(`posts`, () => { - db.post.findMany() + return db.post.findMany() }) } // cache for 1 hour -const post = ({ id }) => { - return cache(`posts`, () => { - db.post.findMany() - }, { expires: 3600 }) +const posts = () => { + return cache( + `posts`, + () => { + return db.post.findMany() + }, + { expires: 3600 } + ) } ``` @@ -1021,15 +1035,15 @@ Note that a key can be a string or an array: ```js const post = ({ id }) => { return cache(`posts-${id}-${updatedAt.getTime()}`, () => { - db.post.findMany() + return db.post.findUnique({ where: { id } }) }) } // or const post = ({ id }) => { - return cache(['posts', id, updatedAt.getTime()], () => { - db.post.findMany() + return cache(['posts', id, updatedAt.getTime()], () => { + return db.post.findUnique({ where: { id } }) }) } ``` @@ -1057,7 +1071,7 @@ The above is the simplest usage example. If you need to pass a `where`, or any o ```js const post = ({ id }) => { return cacheFindMany(`users`, db.user, { - conditions: { where: { roles: 'admin' } } + conditions: { where: { roles: 'admin' } }, }) } ``` @@ -1068,11 +1082,11 @@ This is functionally equivalent to the following: const latest = await db.user.findFirst({ where: { roles: 'admin' } }, orderBy: { updatedAt: 'desc' }, - select: { id: true, updatedAt: true + select: { id: true, updatedAt: true } }) return cache(`posts-${latest.id}-${latest.updatedAt.getTime()}`, () => { - db.post.findMany({ where: { roles: 'admin' } }) + return db.post.findMany({ where: { roles: 'admin' } }) }) ``` @@ -1080,12 +1094,10 @@ If you also want to pass an `expires` option, do it in the same object as `condi ```js const post = ({ id }) => { - return cacheFindMany( - `users`, db.user, { - conditions: { where: { roles: 'admin' } }, - expires: 86400 - } - ) + return cacheFindMany(`users`, db.user, { + conditions: { where: { roles: 'admin' } }, + expires: 86400, + }) } ``` @@ -1114,7 +1126,7 @@ const updateUser = async ({ id, input }) => { }) ``` -:::caution +:::warning When explicitly deleting cache keys like this you could find yourself going down a rabbit hole. What if there is another service somewhere that also updates user? Or another service that updates an organization, as well as all of its underlying child users at the same time? You'll need to be sure to call `deleteCacheKey()` in these places as well. As a general guideline, it's better to come up with a cache key that encapsulates any triggers for when the data has changed (like the `updatedAt` timestamp, which will change no matter who updates the user, anywhere in your codebase). 
@@ -1122,8 +1134,8 @@ Scenarios like this are what people are talking about when they say that caching ::: - ### Testing what you cache + We wouldn't just give you all of these caching APIs and not show you how to test it right? You'll find all the details in the [Caching section in the testing doc](testing.md#testing-caching). ### Creating Your Own Client diff --git a/docs/versioned_docs/version-4.x/services.md b/docs/versioned_docs/version-4.x/services.md index 8c79ea963be6..a56958f8efba 100644 --- a/docs/versioned_docs/version-4.x/services.md +++ b/docs/versioned_docs/version-4.x/services.md @@ -317,7 +317,6 @@ validate(input.usPhone, 'US Phone Number', { * `message`: a custom error message if validation fails - ```jsx validate(input.usPhone, { format: { @@ -695,6 +694,7 @@ const createUser = (input) => { ``` You can provide the PrismaClient to be used for the transaction and callback. + ```jsx import { db } from 'src/lib/db' @@ -753,7 +753,7 @@ Why use a cache? If you have an expensive or time-consuming process in your serv :::info What about GraphQL caching? -You could also cache data at the [GraphQL layer](https://community.redwoodjs.com/t/guide-power-of-graphql-caching/2624) which has some of the same benefits. Using Envelop plugins you can add a response cache _after_ your services (resolver functions in the context of GraphQL) run - with a global configuration. +You could also cache data at the [GraphQL layer](https://community.redwoodjs.com/t/guide-power-of-graphql-caching/2624) which has some of the same benefits. Using Envelop plugins you can add a response cache *after* your services (resolver functions in the context of GraphQL) run - with a global configuration. However, by placing the cache one level "lower," at the service level, you get the benefit of caching even when one service calls another internally, or when a service is called via another serverless function, and finer grained control of what you're caching. @@ -765,7 +765,7 @@ In our example above you could cache the GraphQL query for the most popular prod As of this writing, Redwood ships with clients for the two most popular cache backends: [Memcached](https://memcached.org/) and [Redis](https://redis.io/). Service caching wraps each of these in an adapter, which makes it easy to add more clients in the future. If you're interested in adding an adapter for your favorite cache client, [open a issue](https://github.com/redwoodjs/redwood/issues) and tell us about it! Instructions for getting started with the code are [below](#creating-your-own-client). -::: info +:::info If you need to access functionality in your cache client that the `cache()` and `cacheFindMany()` functions do not handle, you can always get access to the underlying raw client library and use it however you want: @@ -778,7 +778,12 @@ export const updatePost = async ({ id, input }) => { where: { id }, }) // highlight-next-line - await cacheClient.MSET(`post-${id}`, JSON.stringify(post), `blogpost-${id}`, JSON.stringify(post)) + await cacheClient.MSET( + `post-${id}`, + JSON.stringify(post), + `blogpost-${id}`, + JSON.stringify(post) + ) return post } @@ -813,7 +818,7 @@ What if we add a "type" into the cache key, so we know what type of thing we're One solution would be to put all of the data that we care about changing into the key, like: `product-41442-${description}`. The problem here is that keys can only be so long (in Memcached it's 250 bytes). 
Another option could be to hash the entire product object and use that as the key (this can encompass the `product` part of the key as well as the ID itself, since *any* data in the object being different will result in a new hash): ```js -import { md5 } from "blueimp-md5" +import { md5 } from 'blueimp-md5' cache(md5(JSON.stringify(product)), () => { // ... @@ -833,11 +838,12 @@ cache(product, () => { // ... }) ``` + ::: One drawback to this key is in potentially responding to *too many* data changes, even ones we don't care about caching. Imagine that a product has a `views` field that tracks how many times it has been viewed in the browser. This number will be changing all the time, but if we don't display that count to the user then we're constantly re-creating the cache for the product even though no data the user will see is changing. There's no way to tell Prisma "set the `updatedAt` when the record changes, but not if the `views` column changes." This cache key is too variable. One solution would be to move the `views` column to another table with a `productId` pointing back to this record. Now the `product` is back to just containing data we care about caching. -What if you want to expire a cache regardless of whether the data itself has changed? Maybe you make a UI change where you now show a product's SKU on the page where you didn't before. You weren't previously selecing the `sku` field out of the database, and so it hasn't been cached. But now that you're showing it you'll need to add it the list of fields to return from the service. One solution would be forceably update all of the `updatedAt` fields in the database. But a) Prisma won't easily let you do this since it think it controls that column, and b) every product is going to appear to have been edited at the same time, when in fact nothing changed—you just needed to bust the cache. +What if you want to expire a cache regardless of whether the data itself has changed? Maybe you make a UI change where you now show a product's SKU on the page where you didn't before. You weren't previously selecting the `sku` field out of the database, and so it hasn't been cached. But now that you're showing it you'll need to add it the list of fields to return from the service. One solution would be forcibly update all of the `updatedAt` fields in the database. But a) Prisma won't easily let you do this since it think it controls that column, and b) every product is going to appear to have been edited at the same time, when in fact nothing changed—you just needed to bust the cache. An easier solution to this problem would be to add some kind of version number to your cache key that you are in control of and can change whenever you like. Something like appending a `v1` to the key: `v1-product-${id}-${updatedAt}` @@ -856,7 +862,7 @@ How does that last one work? We get a list of all the keys and then apply a hash ```javascript const product = db.product.findUnique({ where: { id } }) const columns = Object.keys(product) // ['id', 'name', 'sku', ...] -const hash = md5(columns.join(',')) // "e4d7f1b4ed2e42d15898f4b27b019da4" +const hash = md5(columns.join(',')) // "e4d7f1b4ed2e42d15898f4b27b019da4" cache(`v1-product-${hash}-${id}-${updatedAt}`, () => { // ... @@ -872,9 +878,13 @@ Note that this has the side effect of having to select at least one record from You can skirt these issues about what data is changing and what to include or not include in the key by just setting an expiration time on this cache entry. 
You may decide that if a change is made to a product, it's okay if users don't see the change for, say, an hour. In this case just set the expiration time to 3600 seconds and it will automatically be re-built, whether something changed in the record or not: ```js -cache(`product-${id}`, () => { - // ... -}, { expires: 3600 }) +cache( + `product-${id}`, + () => { + // ... + }, + { expires: 3600 } +) ``` This leads to your product cache being rebuilt every hour, even though you haven't made any changes that are of consequence to the user. But that may be we worth the tradeoff versus rebuilding the cache when *no* useful data has changed (like the `views` column being updated). @@ -964,7 +974,7 @@ The second usage of the logger argument: ```js export const { cache, cacheFindMany } = createCache(client, { logger, - timeout: 500 + timeout: 500, }) ``` @@ -985,17 +995,21 @@ Use this function when you want to cache some data, optionally including a numbe ```js // cache forever -const post = ({ id }) => { +const posts = () => { return cache(`posts`, () => { return db.post.findMany() }) } // cache for 1 hour -const post = ({ id }) => { - return cache(`posts`, () => { - return db.post.findMany() - }, { expires: 3600 }) +const posts = () => { + return cache( + `posts`, + () => { + return db.post.findMany() + }, + { expires: 3600 } + ) } ``` @@ -1004,15 +1018,15 @@ Note that a key can be a string or an array: ```js const post = ({ id }) => { return cache(`posts-${id}-${updatedAt.getTime()}`, () => { - return db.post.findMany() + return db.post.findUnique({ where: { id } }) }) } // or const post = ({ id }) => { - return cache(['posts', id, updatedAt.getTime()], () => { - return db.post.findMany() + return cache(['posts', id, updatedAt.getTime()], () => { + return db.post.findUnique({ where: { id } }) }) } ``` @@ -1040,7 +1054,7 @@ The above is the simplest usage example. If you need to pass a `where`, or any o ```js const post = ({ id }) => { return cacheFindMany(`users`, db.user, { - conditions: { where: { roles: 'admin' } } + conditions: { where: { roles: 'admin' } }, }) } ``` @@ -1063,12 +1077,10 @@ If you also want to pass an `expires` option, do it in the same object as `condi ```js const post = ({ id }) => { - return cacheFindMany( - `users`, db.user, { - conditions: { where: { roles: 'admin' } }, - expires: 86400 - } - ) + return cacheFindMany(`users`, db.user, { + conditions: { where: { roles: 'admin' } }, + expires: 86400, + }) } ``` @@ -1080,7 +1092,7 @@ const post = ({ id }) => { ### `deleteCacheKey()` -There may be instances where you want to explictly remove something from the cache so that it gets re-created with the same cache key. A good example is caching a single user, using only their `id` as the cache key. By default, the cache would never bust because a user's `id` is not going to change, no matter how many other fields on user are updated. With `deleteCacheKey()` you can choose to delete the key, for example, when the `updateUser()` service is called. The next time `user()` is called, it will be re-cached with the same key, but it will now contain whatever data was updated. +There may be instances where you want to explicitly remove something from the cache so that it gets re-created with the same cache key. A good example is caching a single user, using only their `id` as the cache key. By default, the cache would never bust because a user's `id` is not going to change, no matter how many other fields on user are updated. 
With `deleteCacheKey()` you can choose to delete the key, for example, when the `updateUser()` service is called. The next time `user()` is called, it will be re-cached with the same key, but it will now contain whatever data was updated. ```javascript import { cache, deleteCacheKey } from 'src/lib/cache' @@ -1097,16 +1109,16 @@ const updateUser = async ({ id, input }) => { }) ``` -:::caution +:::warning -When explictly deleting cache keys like this you could find yourself going down a rabbit hole. What if there is another service somewhere that also updates user? Or another service that updates an organization, as well as all of its underlying child users at the same time? You'll need to be sure to call `deleteCacheKey()` in these places as well. As a general guideline, it's better to come up with a cache key that encapsulates any triggers for when the data has changed (like the `updatedAt` timestamp, which will change no matter who updates the user, anywhere in your codebase). +When explicitly deleting cache keys like this you could find yourself going down a rabbit hole. What if there is another service somewhere that also updates user? Or another service that updates an organization, as well as all of its underlying child users at the same time? You'll need to be sure to call `deleteCacheKey()` in these places as well. As a general guideline, it's better to come up with a cache key that encapsulates any triggers for when the data has changed (like the `updatedAt` timestamp, which will change no matter who updates the user, anywhere in your codebase). Scenarios like this are what people are talking about when they say that caching is hard! ::: - ### Testing what you cache + We wouldn't just give you all of these caching APIs and not show you how to test it right? You'll find all the details in the [Caching section in the testing doc](testing.md#testing-caching). ### Creating Your Own Client diff --git a/docs/versioned_docs/version-5.x/services.md b/docs/versioned_docs/version-5.x/services.md index 26915ed7aa3f..fe24cb1c32da 100644 --- a/docs/versioned_docs/version-5.x/services.md +++ b/docs/versioned_docs/version-5.x/services.md @@ -323,7 +323,6 @@ validate(input.usPhone, 'US Phone Number', { * `message`: a custom error message if validation fails - ```jsx validate(input.usPhone, { format: { @@ -616,6 +615,7 @@ validate(input.value, 'Value', { } }) ``` + ### validateWithSync() `validateWith()` is simply given a function to execute. This function should throw with a message if there is a problem, otherwise do nothing. @@ -712,6 +712,7 @@ const createUser = (input) => { ``` You can provide the PrismaClient to be used for the transaction and callback. + ```jsx import { db } from 'src/lib/db' @@ -770,7 +771,7 @@ Why use a cache? If you have an expensive or time-consuming process in your serv :::info What about GraphQL caching? -You could also cache data at the [GraphQL layer](https://community.redwoodjs.com/t/guide-power-of-graphql-caching/2624) which has some of the same benefits. Using Envelop plugins you can add a response cache _after_ your services (resolver functions in the context of GraphQL) run - with a global configuration. +You could also cache data at the [GraphQL layer](https://community.redwoodjs.com/t/guide-power-of-graphql-caching/2624) which has some of the same benefits. Using Envelop plugins you can add a response cache *after* your services (resolver functions in the context of GraphQL) run - with a global configuration. 
However, by placing the cache one level "lower," at the service level, you get the benefit of caching even when one service calls another internally, or when a service is called via another serverless function, and finer grained control of what you're caching. @@ -795,7 +796,12 @@ export const updatePost = async ({ id, input }) => { where: { id }, }) // highlight-next-line - await cacheClient.MSET(`post-${id}`, JSON.stringify(post), `blogpost-${id}`, JSON.stringify(post)) + await cacheClient.MSET( + `post-${id}`, + JSON.stringify(post), + `blogpost-${id}`, + JSON.stringify(post) + ) return post } @@ -830,7 +836,7 @@ What if we add a "type" into the cache key, so we know what type of thing we're One solution would be to put all of the data that we care about changing into the key, like: `product-41442-${description}`. The problem here is that keys can only be so long (in Memcached it's 250 bytes). Another option could be to hash the entire product object and use that as the key (this can encompass the `product` part of the key as well as the ID itself, since *any* data in the object being different will result in a new hash): ```js -import { md5 } from "blueimp-md5" +import { md5 } from 'blueimp-md5' cache(md5(JSON.stringify(product)), () => { // ... @@ -850,6 +856,7 @@ cache(product, () => { // ... }) ``` + ::: One drawback to this key is in potentially responding to *too many* data changes, even ones we don't care about caching. Imagine that a product has a `views` field that tracks how many times it has been viewed in the browser. This number will be changing all the time, but if we don't display that count to the user then we're constantly re-creating the cache for the product even though no data the user will see is changing. There's no way to tell Prisma "set the `updatedAt` when the record changes, but not if the `views` column changes." This cache key is too variable. One solution would be to move the `views` column to another table with a `productId` pointing back to this record. Now the `product` is back to just containing data we care about caching. @@ -873,7 +880,7 @@ How does that last one work? We get a list of all the keys and then apply a hash ```javascript const product = db.product.findUnique({ where: { id } }) const columns = Object.keys(product) // ['id', 'name', 'sku', ...] -const hash = md5(columns.join(',')) // "e4d7f1b4ed2e42d15898f4b27b019da4" +const hash = md5(columns.join(',')) // "e4d7f1b4ed2e42d15898f4b27b019da4" cache(`v1-product-${hash}-${id}-${updatedAt}`, () => { // ... @@ -889,9 +896,13 @@ Note that this has the side effect of having to select at least one record from You can skirt these issues about what data is changing and what to include or not include in the key by just setting an expiration time on this cache entry. You may decide that if a change is made to a product, it's okay if users don't see the change for, say, an hour. In this case just set the expiration time to 3600 seconds and it will automatically be re-built, whether something changed in the record or not: ```js -cache(`product-${id}`, () => { - // ... -}, { expires: 3600 }) +cache( + `product-${id}`, + () => { + // ... + }, + { expires: 3600 } +) ``` This leads to your product cache being rebuilt every hour, even though you haven't made any changes that are of consequence to the user. But that may well be worth the tradeoff versus rebuilding the cache when *no* useful data has changed (like the `views` column being updated).
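The two approaches also compose: key on `updatedAt` so real edits bust the cache immediately, and add an `expires` ceiling as a safety net. A sketch using the same `cache()` API from `src/lib/cache` (the 24-hour TTL is just an example):

```js
import { db } from 'src/lib/db'
import { cache } from 'src/lib/cache'

const product = async ({ id }) => {
  // cheap lookup of just the timestamp to build the key
  const { updatedAt } = await db.product.findUnique({
    where: { id },
    select: { updatedAt: true },
  })

  // real edits change the key instantly; `expires` catches everything else
  return cache(
    `product-${id}-${updatedAt.getTime()}`,
    () => db.product.findUnique({ where: { id } }),
    { expires: 86400 }
  )
}
```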
@@ -981,7 +992,7 @@ The second usage of the logger argument: ```js export const { cache, cacheFindMany } = createCache(client, { logger, - timeout: 500 + timeout: 500, }) ``` @@ -1002,17 +1013,21 @@ Use this function when you want to cache some data, optionally including a numbe ```js // cache forever -const post = ({ id }) => { +const posts = () => { return cache(`posts`, () => { return db.post.findMany() }) } // cache for 1 hour -const post = ({ id }) => { - return cache(`posts`, () => { - return db.post.findMany() - }, { expires: 3600 }) +const posts = () => { + return cache( + `posts`, + () => { + return db.post.findMany() + }, + { expires: 3600 } + ) } ``` @@ -1021,15 +1036,15 @@ Note that a key can be a string or an array: ```js const post = ({ id }) => { return cache(`posts-${id}-${updatedAt.getTime()}`, () => { - return db.post.findMany() + return db.post.findUnique({ where: { id } }) }) } // or const post = ({ id }) => { - return cache(['posts', id, updatedAt.getTime()], () => { - return db.post.findMany() + return cache(['posts', id, updatedAt.getTime()], () => { + return db.post.findUnique({ where: { id } }) }) } ``` @@ -1057,7 +1072,7 @@ The above is the simplest usage example. If you need to pass a `where`, or any o ```js const post = ({ id }) => { return cacheFindMany(`users`, db.user, { - conditions: { where: { roles: 'admin' } } + conditions: { where: { roles: 'admin' } }, }) } ``` @@ -1080,12 +1095,10 @@ If you also want to pass an `expires` option, do it in the same object as `condi ```js const post = ({ id }) => { - return cacheFindMany( - `users`, db.user, { - conditions: { where: { roles: 'admin' } }, - expires: 86400 - } - ) + return cacheFindMany(`users`, db.user, { + conditions: { where: { roles: 'admin' } }, + expires: 86400, + }) } ``` @@ -1114,7 +1127,7 @@ const updateUser = async ({ id, input }) => { }) ``` -:::caution +:::warning When explicitly deleting cache keys like this you could find yourself going down a rabbit hole. What if there is another service somewhere that also updates user? Or another service that updates an organization, as well as all of its underlying child users at the same time? You'll need to be sure to call `deleteCacheKey()` in these places as well. As a general guideline, it's better to come up with a cache key that encapsulates any triggers for when the data has changed (like the `updatedAt` timestamp, which will change no matter who updates the user, anywhere in your codebase). @@ -1122,8 +1135,8 @@ Scenarios like this are what people are talking about when they say that caching ::: - ### Testing what you cache + We wouldn't just give you all of these caching APIs and not show you how to test it right? You'll find all the details in the [Caching section in the testing doc](testing.md#testing-caching). ### Creating Your Own Client diff --git a/docs/versioned_docs/version-6.x/services.md b/docs/versioned_docs/version-6.x/services.md index 52bb916f0a8a..a17d5fff940a 100644 --- a/docs/versioned_docs/version-6.x/services.md +++ b/docs/versioned_docs/version-6.x/services.md @@ -323,7 +323,6 @@ validate(input.usPhone, 'US Phone Number', { * `message`: a custom error message if validation fails - ```jsx validate(input.usPhone, { format: { @@ -616,6 +615,7 @@ validate(input.value, 'Value', { } }) ``` + ### validateWithSync() `validateWithSync()` is simply given a function to execute. This function should throw with a message if there is a problem, otherwise do nothing. 
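For instance, a synchronous guard inside a service might look something like this (a minimal sketch; the service and field names are made up):

```js
import { validateWithSync } from '@redwoodjs/api'

import { db } from 'src/lib/db'

export const createOrder = ({ input }) => {
  // if the function throws, the error is surfaced as a validation error
  validateWithSync(() => {
    if (input.quantity < 1) {
      throw new Error('Quantity must be at least 1')
    }
  })

  return db.order.create({ data: input })
}
```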
@@ -712,6 +712,7 @@ const createUser = (input) => { ``` You can provide the PrismaClient to be used for the transaction and callback. + ```jsx import { db } from 'src/lib/db' @@ -770,7 +771,7 @@ Why use a cache? If you have an expensive or time-consuming process in your serv :::info What about GraphQL caching? -You could also cache data at the [GraphQL layer](https://community.redwoodjs.com/t/guide-power-of-graphql-caching/2624) which has some of the same benefits. Using Envelop plugins you can add a response cache _after_ your services (resolver functions in the context of GraphQL) run - with a global configuration. +You could also cache data at the [GraphQL layer](https://community.redwoodjs.com/t/guide-power-of-graphql-caching/2624) which has some of the same benefits. Using Envelop plugins you can add a response cache *after* your services (resolver functions in the context of GraphQL) run - with a global configuration. However, by placing the cache one level "lower," at the service level, you get the benefit of caching even when one service calls another internally, or when a service is called via another serverless function, and finer grained control of what you're caching. @@ -795,7 +796,12 @@ export const updatePost = async ({ id, input }) => { where: { id }, }) // highlight-next-line - await cacheClient.MSET(`post-${id}`, JSON.stringify(post), `blogpost-${id}`, JSON.stringify(post)) + await cacheClient.MSET( + `post-${id}`, + JSON.stringify(post), + `blogpost-${id}`, + JSON.stringify(post) + ) return post } @@ -830,7 +836,7 @@ What if we add a "type" into the cache key, so we know what type of thing we're One solution would be to put all of the data that we care about changing into the key, like: `product-41442-${description}`. The problem here is that keys can only be so long (in Memcached it's 250 bytes). Another option could be to hash the entire product object and use that as the key (this can encompass the `product` part of the key as well as the ID itself, since *any* data in the object being different will result in a new hash): ```js -import { md5 } from "blueimp-md5" +import { md5 } from 'blueimp-md5' cache(md5(JSON.stringify(product)), () => { // ... @@ -850,6 +856,7 @@ cache(product, () => { // ... }) ``` + ::: One drawback to this key is in potentially responding to *too many* data changes, even ones we don't care about caching. Imagine that a product has a `views` field that tracks how many times it has been viewed in the browser. This number will be changing all the time, but if we don't display that count to the user then we're constantly re-creating the cache for the product even though no data the user will see is changing. There's no way to tell Prisma "set the `updatedAt` when the record changes, but not if the `views` column changes." This cache key is too variable. One solution would be to move the `views` column to another table with a `productId` pointing back to this record. Now the `product` is back to just containing data we care about caching. @@ -873,7 +880,7 @@ How does that last one work? We get a list of all the keys and then apply a hash ```javascript const product = db.product.findUnique({ where: { id } }) const columns = Object.keys(product) // ['id', 'name', 'sku', ...] -const hash = md5(columns.join(',')) // "e4d7f1b4ed2e42d15898f4b27b019da4" +const hash = md5(columns.join(',')) // "e4d7f1b4ed2e42d15898f4b27b019da4" cache(`v1-product-${hash}-${id}-${updatedAt}`, () => { // ... 
@@ -889,9 +896,13 @@ Note that this has the side effect of having to select at least one record from You can skirt these issues about what data is changing and what to include or not include in the key by just setting an expiration time on this cache entry. You may decide that if a change is made to a product, it's okay if users don't see the change for, say, an hour. In this case just set the expiration time to 3600 seconds and it will automatically be re-built, whether something changed in the record or not: ```js -cache(`product-${id}`, () => { - // ... -}, { expires: 3600 }) +cache( + `product-${id}`, + () => { + // ... + }, + { expires: 3600 } +) ``` This leads to your product cache being rebuilt every hour, even though you haven't made any changes that are of consequence to the user. But that may be we worth the tradeoff versus rebuilding the cache when *no* useful data has changed (like the `views` column being updated). @@ -981,7 +992,7 @@ The second usage of the logger argument: ```js export const { cache, cacheFindMany } = createCache(client, { logger, - timeout: 500 + timeout: 500, }) ``` @@ -1002,17 +1013,21 @@ Use this function when you want to cache some data, optionally including a numbe ```js // cache forever -const post = ({ id }) => { +const posts = () => { return cache(`posts`, () => { return db.post.findMany() }) } // cache for 1 hour -const post = ({ id }) => { - return cache(`posts`, () => { - return db.post.findMany() - }, { expires: 3600 }) +const posts = () => { + return cache( + `posts`, + () => { + return db.post.findMany() + }, + { expires: 3600 } + ) } ``` @@ -1021,15 +1036,15 @@ Note that a key can be a string or an array: ```js const post = ({ id }) => { return cache(`posts-${id}-${updatedAt.getTime()}`, () => { - return db.post.findMany() + return db.post.findUnique({ where: { id } }) }) } // or const post = ({ id }) => { - return cache(['posts', id, updatedAt.getTime()], () => { - return db.post.findMany() + return cache(['posts', id, updatedAt.getTime()], () => { + return db.post.findUnique({ where: { id } }) }) } ``` @@ -1057,7 +1072,7 @@ The above is the simplest usage example. If you need to pass a `where`, or any o ```js const post = ({ id }) => { return cacheFindMany(`users`, db.user, { - conditions: { where: { roles: 'admin' } } + conditions: { where: { roles: 'admin' } }, }) } ``` @@ -1080,12 +1095,10 @@ If you also want to pass an `expires` option, do it in the same object as `condi ```js const post = ({ id }) => { - return cacheFindMany( - `users`, db.user, { - conditions: { where: { roles: 'admin' } }, - expires: 86400 - } - ) + return cacheFindMany(`users`, db.user, { + conditions: { where: { roles: 'admin' } }, + expires: 86400, + }) } ``` @@ -1122,8 +1135,8 @@ Scenarios like this are what people are talking about when they say that caching ::: - ### Testing what you cache + We wouldn't just give you all of these caching APIs and not show you how to test it right? You'll find all the details in the [Caching section in the testing doc](testing.md#testing-caching). 
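The short version, for the curious: in tests you can back `createCache()` with an in-memory client so nothing needs a running Memcached or Redis. A sketch, assuming the `InMemoryClient` export covered in that doc:

```js
// api/src/lib/cache.js (test setup)
import { InMemoryClient, createCache } from '@redwoodjs/api/cache'

import { logger } from 'src/lib/logger'

// keeps cached values in memory instead of a real cache server
export const client = new InMemoryClient()

export const { cache, cacheFindMany, deleteCacheKey } = createCache(client, {
  logger,
})
```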
### Creating Your Own Client diff --git a/docs/versioned_docs/version-7.0/services.md b/docs/versioned_docs/version-7.0/services.md index 52bb916f0a8a..a17d5fff940a 100644 --- a/docs/versioned_docs/version-7.0/services.md +++ b/docs/versioned_docs/version-7.0/services.md @@ -323,7 +323,6 @@ validate(input.usPhone, 'US Phone Number', { * `message`: a custom error message if validation fails - ```jsx validate(input.usPhone, { format: { @@ -616,6 +615,7 @@ validate(input.value, 'Value', { } }) ``` + ### validateWithSync() `validateWithSync()` is simply given a function to execute. This function should throw with a message if there is a problem, otherwise do nothing. @@ -712,6 +712,7 @@ const createUser = (input) => { ``` You can provide the PrismaClient to be used for the transaction and callback. + ```jsx import { db } from 'src/lib/db' @@ -770,7 +771,7 @@ Why use a cache? If you have an expensive or time-consuming process in your serv :::info What about GraphQL caching? -You could also cache data at the [GraphQL layer](https://community.redwoodjs.com/t/guide-power-of-graphql-caching/2624) which has some of the same benefits. Using Envelop plugins you can add a response cache _after_ your services (resolver functions in the context of GraphQL) run - with a global configuration. +You could also cache data at the [GraphQL layer](https://community.redwoodjs.com/t/guide-power-of-graphql-caching/2624) which has some of the same benefits. Using Envelop plugins you can add a response cache *after* your services (resolver functions in the context of GraphQL) run - with a global configuration. However, by placing the cache one level "lower," at the service level, you get the benefit of caching even when one service calls another internally, or when a service is called via another serverless function, and finer grained control of what you're caching. @@ -795,7 +796,12 @@ export const updatePost = async ({ id, input }) => { where: { id }, }) // highlight-next-line - await cacheClient.MSET(`post-${id}`, JSON.stringify(post), `blogpost-${id}`, JSON.stringify(post)) + await cacheClient.MSET( + `post-${id}`, + JSON.stringify(post), + `blogpost-${id}`, + JSON.stringify(post) + ) return post } @@ -830,7 +836,7 @@ What if we add a "type" into the cache key, so we know what type of thing we're One solution would be to put all of the data that we care about changing into the key, like: `product-41442-${description}`. The problem here is that keys can only be so long (in Memcached it's 250 bytes). Another option could be to hash the entire product object and use that as the key (this can encompass the `product` part of the key as well as the ID itself, since *any* data in the object being different will result in a new hash): ```js -import { md5 } from "blueimp-md5" +import { md5 } from 'blueimp-md5' cache(md5(JSON.stringify(product)), () => { // ... @@ -850,6 +856,7 @@ cache(product, () => { // ... }) ``` + ::: One drawback to this key is in potentially responding to *too many* data changes, even ones we don't care about caching. Imagine that a product has a `views` field that tracks how many times it has been viewed in the browser. This number will be changing all the time, but if we don't display that count to the user then we're constantly re-creating the cache for the product even though no data the user will see is changing. There's no way to tell Prisma "set the `updatedAt` when the record changes, but not if the `views` column changes." This cache key is too variable. 
One solution would be to move the `views` column to another table with a `productId` pointing back to this record. Now the `product` is back to just containing data we care about caching. @@ -873,7 +880,7 @@ How does that last one work? We get a list of all the keys and then apply a hash ```javascript const product = db.product.findUnique({ where: { id } }) const columns = Object.keys(product) // ['id', 'name', 'sku', ...] -const hash = md5(columns.join(',')) // "e4d7f1b4ed2e42d15898f4b27b019da4" +const hash = md5(columns.join(',')) // "e4d7f1b4ed2e42d15898f4b27b019da4" cache(`v1-product-${hash}-${id}-${updatedAt}`, () => { // ... @@ -889,9 +896,13 @@ Note that this has the side effect of having to select at least one record from You can skirt these issues about what data is changing and what to include or not include in the key by just setting an expiration time on this cache entry. You may decide that if a change is made to a product, it's okay if users don't see the change for, say, an hour. In this case just set the expiration time to 3600 seconds and it will automatically be re-built, whether something changed in the record or not: ```js -cache(`product-${id}`, () => { - // ... -}, { expires: 3600 }) +cache( + `product-${id}`, + () => { + // ... + }, + { expires: 3600 } +) ``` This leads to your product cache being rebuilt every hour, even though you haven't made any changes that are of consequence to the user. But that may be we worth the tradeoff versus rebuilding the cache when *no* useful data has changed (like the `views` column being updated). @@ -981,7 +992,7 @@ The second usage of the logger argument: ```js export const { cache, cacheFindMany } = createCache(client, { logger, - timeout: 500 + timeout: 500, }) ``` @@ -1002,17 +1013,21 @@ Use this function when you want to cache some data, optionally including a numbe ```js // cache forever -const post = ({ id }) => { +const posts = () => { return cache(`posts`, () => { return db.post.findMany() }) } // cache for 1 hour -const post = ({ id }) => { - return cache(`posts`, () => { - return db.post.findMany() - }, { expires: 3600 }) +const posts = () => { + return cache( + `posts`, + () => { + return db.post.findMany() + }, + { expires: 3600 } + ) } ``` @@ -1021,15 +1036,15 @@ Note that a key can be a string or an array: ```js const post = ({ id }) => { return cache(`posts-${id}-${updatedAt.getTime()}`, () => { - return db.post.findMany() + return db.post.findUnique({ where: { id } }) }) } // or const post = ({ id }) => { - return cache(['posts', id, updatedAt.getTime()], () => { - return db.post.findMany() + return cache(['posts', id, updatedAt.getTime()], () => { + return db.post.findUnique({ where: { id } }) }) } ``` @@ -1057,7 +1072,7 @@ The above is the simplest usage example. 
If you need to pass a `where`, or any o ```js const post = ({ id }) => { return cacheFindMany(`users`, db.user, { - conditions: { where: { roles: 'admin' } } + conditions: { where: { roles: 'admin' } }, }) } ``` @@ -1080,12 +1095,10 @@ If you also want to pass an `expires` option, do it in the same object as `condi ```js const post = ({ id }) => { - return cacheFindMany( - `users`, db.user, { - conditions: { where: { roles: 'admin' } }, - expires: 86400 - } - ) + return cacheFindMany(`users`, db.user, { + conditions: { where: { roles: 'admin' } }, + expires: 86400, + }) } ``` @@ -1122,8 +1135,8 @@ Scenarios like this are what people are talking about when they say that caching ::: - ### Testing what you cache + We wouldn't just give you all of these caching APIs and not show you how to test it right? You'll find all the details in the [Caching section in the testing doc](testing.md#testing-caching). ### Creating Your Own Client diff --git a/packages/api-server/src/logFormatter/bin.ts b/packages/api-server/src/logFormatter/bin.ts index e3695b27f706..c677fe67b759 100644 --- a/packages/api-server/src/logFormatter/bin.ts +++ b/packages/api-server/src/logFormatter/bin.ts @@ -6,3 +6,9 @@ const input = process.stdin const output = process.stdout input.pipe(split(LogFormatter())).pipe(output) + +// assume that receiving a SIGINT (Ctrl-C) is a normal event, so don't exit with +// a 129 error code, which makes execa blow up. Just return a nice quiet 0. +process.on('SIGINT', () => { + process.exit(0) +}) diff --git a/packages/api-server/tsconfig.json b/packages/api-server/tsconfig.json index 914a03416a01..35e7e976b7ba 100644 --- a/packages/api-server/tsconfig.json +++ b/packages/api-server/tsconfig.json @@ -1,12 +1,11 @@ { "extends": "../../tsconfig.compilerOption.json", "compilerOptions": { - "rootDir": "src", - "outDir": "dist", + "isolatedModules": true, "emitDeclarationOnly": false, "noEmit": true }, - "include": ["src", "ambient.d.ts"], + "include": ["."], "references": [ { "path": "../internal" }, { "path": "../project-config" }, diff --git a/packages/auth-providers/auth0/web/.babelrc.js b/packages/auth-providers/auth0/web/.babelrc.js deleted file mode 100644 index 4312886a07e5..000000000000 --- a/packages/auth-providers/auth0/web/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../../../babel.config.js' } diff --git a/packages/auth-providers/auth0/web/build.ts b/packages/auth-providers/auth0/web/build.ts new file mode 100644 index 000000000000..afc14135d939 --- /dev/null +++ b/packages/auth-providers/auth0/web/build.ts @@ -0,0 +1,17 @@ +import { buildCjs, buildEsm } from '@redwoodjs/framework-tools' +import { + generateTypesCjs, + generateTypesEsm, + insertCommonJsPackageJson, +} from '@redwoodjs/framework-tools/generateTypes' + +// ESM build and type generation +await buildEsm() +await generateTypesEsm() + +// CJS build, type generation, and package.json insert +await buildCjs() +await generateTypesCjs() +await insertCommonJsPackageJson({ + buildFileUrl: import.meta.url, +}) diff --git a/packages/auth-providers/auth0/web/package.json b/packages/auth-providers/auth0/web/package.json index 32120c49f3cc..78575dab93ba 100644 --- a/packages/auth-providers/auth0/web/package.json +++ b/packages/auth-providers/auth0/web/package.json @@ -7,32 +7,58 @@ "directory": "packages/auth-providers/auth0/web" }, "license": "MIT", - "main": "./dist/index.js", + "type": "module", + "exports": { + ".": { + "import": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + }, + 
"default": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./dist/auth0": { + "import": { + "types": "./dist/auth0.d.ts", + "default": "./dist/auth0.js" + }, + "default": { + "types": "./dist/cjs/auth0.d.ts", + "default": "./dist/cjs/auth0.js" + } + } + }, + "main": "./dist/cjs/index.js", + "module": "./dist/index.js", "types": "./dist/index.d.ts", "files": [ "dist" ], "scripts": { - "build": "yarn build:js && yarn build:types", - "build:js": "babel src -d dist --extensions \".js,.jsx,.ts,.tsx\" --copy-files --no-copy-ignored", + "build": "tsx ./build.ts", "build:pack": "yarn pack -o redwoodjs-auth-auth0-web.tgz", - "build:types": "tsc --build --verbose", + "build:types": "tsc --build --verbose ./tsconfig.build.json", + "build:types-cjs": "tsc --build --verbose ./tsconfig.cjs.json", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx,template\" --ignore dist --exec \"yarn build\"", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", "prepublishOnly": "NODE_ENV=production yarn build", "test": "vitest run", "test:watch": "vitest watch" }, "dependencies": { - "@babel/runtime-corejs3": "7.25.0", - "@redwoodjs/auth": "workspace:*", - "core-js": "3.38.0" + "@redwoodjs/auth": "workspace:*" }, "devDependencies": { "@auth0/auth0-spa-js": "2.1.3", - "@babel/cli": "7.24.8", - "@babel/core": "^7.22.20", + "@redwoodjs/framework-tools": "workspace:*", "@types/react": "^18.2.55", + "concurrently": "8.2.2", + "publint": "0.2.10", "react": "19.0.0-rc-8269d55d-20240802", + "tsx": "4.17.0", "typescript": "5.5.4", "vitest": "2.0.5" }, diff --git a/packages/auth-providers/auth0/web/src/__tests__/auth0.test.tsx b/packages/auth-providers/auth0/web/src/__tests__/auth0.test.tsx index aab7df2c6836..59fde602082a 100644 --- a/packages/auth-providers/auth0/web/src/__tests__/auth0.test.tsx +++ b/packages/auth-providers/auth0/web/src/__tests__/auth0.test.tsx @@ -9,7 +9,7 @@ import { vi, beforeAll, beforeEach, describe, expect, it } from 'vitest' import type { CurrentUser } from '@redwoodjs/auth' -import { createAuth } from '../auth0' +import { createAuth } from '../auth0.js' const user: User = { sub: 'unique_user_id', diff --git a/packages/auth-providers/auth0/web/src/index.ts b/packages/auth-providers/auth0/web/src/index.ts index de1dfbd2d3f9..aace852c623c 100644 --- a/packages/auth-providers/auth0/web/src/index.ts +++ b/packages/auth-providers/auth0/web/src/index.ts @@ -1 +1 @@ -export { createAuth } from './auth0' +export { createAuth } from './auth0.js' diff --git a/packages/auth-providers/auth0/web/tsconfig.build.json b/packages/auth-providers/auth0/web/tsconfig.build.json new file mode 100644 index 000000000000..e7e0fedfaba0 --- /dev/null +++ b/packages/auth-providers/auth0/web/tsconfig.build.json @@ -0,0 +1,15 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "strict": true, + "rootDir": "src", + "outDir": "dist", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"], + "references": [ + { + "path": "./../../../auth/tsconfig.build.json" + } + ] +} diff --git a/packages/web/tsconfig.types-cjs.json b/packages/auth-providers/auth0/web/tsconfig.cjs.json similarity index 62% rename from packages/web/tsconfig.types-cjs.json rename to packages/auth-providers/auth0/web/tsconfig.cjs.json index 0eea039941c9..a660cecf11ff 100644 --- a/packages/web/tsconfig.types-cjs.json +++ b/packages/auth-providers/auth0/web/tsconfig.cjs.json @@ -2,6 +2,6 @@ "extends": 
"./tsconfig.build.json", "compilerOptions": { "outDir": "dist/cjs", - "tsBuildInfoFile": "tsconfig.types-cjs.tsbuildinfo" + "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo" } } diff --git a/packages/auth-providers/auth0/web/tsconfig.json b/packages/auth-providers/auth0/web/tsconfig.json index 6f1a0dfbbe25..a31dd6f9faaa 100644 --- a/packages/auth-providers/auth0/web/tsconfig.json +++ b/packages/auth-providers/auth0/web/tsconfig.json @@ -2,9 +2,13 @@ "extends": "../../../../tsconfig.compilerOption.json", "compilerOptions": { "strict": true, - "rootDir": "src", - "outDir": "dist" + "module": "Node16", + "moduleResolution": "Node16" }, - "include": ["src"], - "references": [{ "path": "../../../auth/tsconfig.build.json" }] + "include": ["."], + "references": [ + { "path": "../../../auth/tsconfig.build.json" }, + { "path": "../../../framework-tools" } + ], + "exclude": ["dist", "node_modules", "**/__mocks__", "**/__tests__/fixtures"] } diff --git a/packages/auth-providers/azureActiveDirectory/web/.babelrc.js b/packages/auth-providers/azureActiveDirectory/web/.babelrc.js deleted file mode 100644 index 4312886a07e5..000000000000 --- a/packages/auth-providers/azureActiveDirectory/web/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../../../babel.config.js' } diff --git a/packages/auth-providers/azureActiveDirectory/web/build.ts b/packages/auth-providers/azureActiveDirectory/web/build.ts new file mode 100644 index 000000000000..afc14135d939 --- /dev/null +++ b/packages/auth-providers/azureActiveDirectory/web/build.ts @@ -0,0 +1,17 @@ +import { buildCjs, buildEsm } from '@redwoodjs/framework-tools' +import { + generateTypesCjs, + generateTypesEsm, + insertCommonJsPackageJson, +} from '@redwoodjs/framework-tools/generateTypes' + +// ESM build and type generation +await buildEsm() +await generateTypesEsm() + +// CJS build, type generation, and package.json insert +await buildCjs() +await generateTypesCjs() +await insertCommonJsPackageJson({ + buildFileUrl: import.meta.url, +}) diff --git a/packages/auth-providers/azureActiveDirectory/web/package.json b/packages/auth-providers/azureActiveDirectory/web/package.json index 907854a967c7..45e19ab48e61 100644 --- a/packages/auth-providers/azureActiveDirectory/web/package.json +++ b/packages/auth-providers/azureActiveDirectory/web/package.json @@ -7,33 +7,59 @@ "directory": "packages/auth-providers/azureActiveDirectory/web" }, "license": "MIT", - "main": "./dist/index.js", + "type": "module", + "exports": { + ".": { + "import": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + }, + "default": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./dist/azureActiveDirectory": { + "import": { + "types": "./dist/azureActiveDirectory.d.ts", + "default": "./dist/azureActiveDirectory.js" + }, + "default": { + "types": "./dist/cjs/azureActiveDirectory.d.ts", + "default": "./dist/cjs/azureActiveDirectory.js" + } + } + }, + "main": "./dist/cjs/index.js", + "module": "./dist/index.js", "types": "./dist/index.d.ts", "files": [ "dist" ], "scripts": { - "build": "yarn build:js && yarn build:types", - "build:js": "babel src -d dist --extensions \".js,.jsx,.ts,.tsx\" --copy-files --no-copy-ignored", + "build": "tsx ./build.ts", "build:pack": "yarn pack -o redwoodjs-auth-azure-active-directory-web.tgz", - "build:types": "tsc --build --verbose", + "build:types": "tsc --build --verbose ./tsconfig.build.json", + "build:types-cjs": "tsc --build --verbose ./tsconfig.cjs.json", "build:watch": "nodemon --watch 
src --ext \"js,jsx,ts,tsx,template\" --ignore dist --exec \"yarn build\"", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", "prepublishOnly": "NODE_ENV=production yarn build", "test": "vitest run", "test:watch": "vitest watch" }, "dependencies": { - "@babel/runtime-corejs3": "7.25.0", - "@redwoodjs/auth": "workspace:*", - "core-js": "3.38.0" + "@redwoodjs/auth": "workspace:*" }, "devDependencies": { "@azure/msal-browser": "2.39.0", - "@babel/cli": "7.24.8", - "@babel/core": "^7.22.20", + "@redwoodjs/framework-tools": "workspace:*", "@types/netlify-identity-widget": "1.9.6", "@types/react": "^18.2.55", + "concurrently": "8.2.2", + "publint": "0.2.10", "react": "19.0.0-rc-8269d55d-20240802", + "tsx": "4.17.0", "typescript": "5.5.4", "vitest": "2.0.5" }, diff --git a/packages/auth-providers/azureActiveDirectory/web/src/__tests__/azureActiveDirectory.test.tsx b/packages/auth-providers/azureActiveDirectory/web/src/__tests__/azureActiveDirectory.test.tsx index adc839800bb4..6f41501c0993 100644 --- a/packages/auth-providers/azureActiveDirectory/web/src/__tests__/azureActiveDirectory.test.tsx +++ b/packages/auth-providers/azureActiveDirectory/web/src/__tests__/azureActiveDirectory.test.tsx @@ -8,7 +8,7 @@ import { vi, it, expect, describe, beforeAll, beforeEach } from 'vitest' import type { CurrentUser } from '@redwoodjs/auth' -import { createAuth } from '../azureActiveDirectory' +import { createAuth } from '../azureActiveDirectory.js' const user: AccountInfo = { name: 'John', diff --git a/packages/auth-providers/azureActiveDirectory/web/src/index.ts b/packages/auth-providers/azureActiveDirectory/web/src/index.ts index 3c947e8dd6ba..fb6a6d420727 100644 --- a/packages/auth-providers/azureActiveDirectory/web/src/index.ts +++ b/packages/auth-providers/azureActiveDirectory/web/src/index.ts @@ -1 +1 @@ -export { createAuth } from './azureActiveDirectory' +export { createAuth } from './azureActiveDirectory.js' diff --git a/packages/auth-providers/azureActiveDirectory/web/tsconfig.build.json b/packages/auth-providers/azureActiveDirectory/web/tsconfig.build.json new file mode 100644 index 000000000000..e7e0fedfaba0 --- /dev/null +++ b/packages/auth-providers/azureActiveDirectory/web/tsconfig.build.json @@ -0,0 +1,15 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "strict": true, + "rootDir": "src", + "outDir": "dist", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"], + "references": [ + { + "path": "./../../../auth/tsconfig.build.json" + } + ] +} diff --git a/packages/auth/tsconfig.types-cjs.json b/packages/auth-providers/azureActiveDirectory/web/tsconfig.cjs.json similarity index 62% rename from packages/auth/tsconfig.types-cjs.json rename to packages/auth-providers/azureActiveDirectory/web/tsconfig.cjs.json index 07cf70f4ba81..a660cecf11ff 100644 --- a/packages/auth/tsconfig.types-cjs.json +++ b/packages/auth-providers/azureActiveDirectory/web/tsconfig.cjs.json @@ -2,6 +2,6 @@ "extends": "./tsconfig.build.json", "compilerOptions": { "outDir": "dist/cjs", - "tsBuildInfoFile": "./tsconfig.types-cjs.tsbuildinfo" + "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo" } } diff --git a/packages/auth-providers/azureActiveDirectory/web/tsconfig.json b/packages/auth-providers/azureActiveDirectory/web/tsconfig.json index 6f1a0dfbbe25..189511d06433 100644 --- a/packages/auth-providers/azureActiveDirectory/web/tsconfig.json +++ b/packages/auth-providers/azureActiveDirectory/web/tsconfig.json @@ -2,9 
+2,12 @@ "extends": "../../../../tsconfig.compilerOption.json", "compilerOptions": { "strict": true, - "rootDir": "src", - "outDir": "dist" + "module": "Node16", + "moduleResolution": "Node16" }, - "include": ["src"], - "references": [{ "path": "../../../auth/tsconfig.build.json" }] + "include": ["."], + "references": [ + { "path": "../../../auth/tsconfig.build.json" }, + { "path": "../../../framework-tools" } + ] } diff --git a/packages/auth-providers/clerk/web/.babelrc.js b/packages/auth-providers/clerk/web/.babelrc.js deleted file mode 100644 index 4312886a07e5..000000000000 --- a/packages/auth-providers/clerk/web/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../../../babel.config.js' } diff --git a/packages/auth-providers/clerk/web/build.ts b/packages/auth-providers/clerk/web/build.ts new file mode 100644 index 000000000000..afc14135d939 --- /dev/null +++ b/packages/auth-providers/clerk/web/build.ts @@ -0,0 +1,17 @@ +import { buildCjs, buildEsm } from '@redwoodjs/framework-tools' +import { + generateTypesCjs, + generateTypesEsm, + insertCommonJsPackageJson, +} from '@redwoodjs/framework-tools/generateTypes' + +// ESM build and type generation +await buildEsm() +await generateTypesEsm() + +// CJS build, type generation, and package.json insert +await buildCjs() +await generateTypesCjs() +await insertCommonJsPackageJson({ + buildFileUrl: import.meta.url, +}) diff --git a/packages/auth-providers/clerk/web/package.json b/packages/auth-providers/clerk/web/package.json index 9d3350f1a98c..bd9dd4546fc4 100644 --- a/packages/auth-providers/clerk/web/package.json +++ b/packages/auth-providers/clerk/web/package.json @@ -7,33 +7,59 @@ "directory": "packages/auth-providers/clerk/web" }, "license": "MIT", - "main": "./dist/index.js", + "type": "module", + "exports": { + ".": { + "import": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + }, + "default": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./dist/clerk": { + "import": { + "types": "./dist/clerk.d.ts", + "default": "./dist/clerk.js" + }, + "default": { + "types": "./dist/cjs/clerk.d.ts", + "default": "./dist/cjs/clerk.js" + } + } + }, + "main": "./dist/cjs/index.js", + "module": "./dist/index.js", "types": "./dist/index.d.ts", "files": [ "dist" ], "scripts": { - "build": "yarn build:js && yarn build:types", - "build:js": "babel src -d dist --extensions \".js,.jsx,.ts,.tsx\" --copy-files --no-copy-ignored", + "build": "tsx ./build.ts", "build:pack": "yarn pack -o redwoodjs-auth-clerk-web.tgz", - "build:types": "tsc --build --verbose", + "build:types": "tsc --build --verbose ./tsconfig.build.json", + "build:types-cjs": "tsc --build --verbose ./tsconfig.cjs.json", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx,template\" --ignore dist --exec \"yarn build\"", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", "prepublishOnly": "NODE_ENV=production yarn build", "test": "vitest run", "test:watch": "vitest watch" }, "dependencies": { - "@babel/runtime-corejs3": "7.25.0", - "@redwoodjs/auth": "workspace:*", - "core-js": "3.38.0" + "@redwoodjs/auth": "workspace:*" }, "devDependencies": { - "@babel/cli": "7.24.8", - "@babel/core": "^7.22.20", "@clerk/clerk-react": "4.32.3", "@clerk/types": "3.65.3", + "@redwoodjs/framework-tools": "workspace:*", "@types/react": "^18.2.55", + "concurrently": "8.2.2", + "publint": "0.2.10", "react": "19.0.0-rc-8269d55d-20240802", + "tsx": "4.17.0", "typescript": "5.5.4", 
"vitest": "2.0.5" }, diff --git a/packages/auth-providers/clerk/web/src/__tests__/clerk.test.tsx b/packages/auth-providers/clerk/web/src/__tests__/clerk.test.tsx index 7a5334f5d82a..15018407810d 100644 --- a/packages/auth-providers/clerk/web/src/__tests__/clerk.test.tsx +++ b/packages/auth-providers/clerk/web/src/__tests__/clerk.test.tsx @@ -9,7 +9,7 @@ import { vi, expect, describe, it, beforeAll, beforeEach } from 'vitest' import type { CurrentUser } from '@redwoodjs/auth' -import { createAuth } from '../clerk' +import { createAuth } from '../clerk.js' const user: Partial = { id: 'unique_user_id', diff --git a/packages/auth-providers/clerk/web/src/index.ts b/packages/auth-providers/clerk/web/src/index.ts index c3bddea6584a..b232033b05fd 100644 --- a/packages/auth-providers/clerk/web/src/index.ts +++ b/packages/auth-providers/clerk/web/src/index.ts @@ -1 +1 @@ -export { createAuth } from './clerk' +export { createAuth } from './clerk.js' diff --git a/packages/auth-providers/clerk/web/tsconfig.build.json b/packages/auth-providers/clerk/web/tsconfig.build.json new file mode 100644 index 000000000000..e7e0fedfaba0 --- /dev/null +++ b/packages/auth-providers/clerk/web/tsconfig.build.json @@ -0,0 +1,15 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "strict": true, + "rootDir": "src", + "outDir": "dist", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"], + "references": [ + { + "path": "./../../../auth/tsconfig.build.json" + } + ] +} diff --git a/packages/auth-providers/clerk/web/tsconfig.cjs.json b/packages/auth-providers/clerk/web/tsconfig.cjs.json new file mode 100644 index 000000000000..a660cecf11ff --- /dev/null +++ b/packages/auth-providers/clerk/web/tsconfig.cjs.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.build.json", + "compilerOptions": { + "outDir": "dist/cjs", + "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo" + } +} diff --git a/packages/auth-providers/clerk/web/tsconfig.json b/packages/auth-providers/clerk/web/tsconfig.json index 6f1a0dfbbe25..a31dd6f9faaa 100644 --- a/packages/auth-providers/clerk/web/tsconfig.json +++ b/packages/auth-providers/clerk/web/tsconfig.json @@ -2,9 +2,13 @@ "extends": "../../../../tsconfig.compilerOption.json", "compilerOptions": { "strict": true, - "rootDir": "src", - "outDir": "dist" + "module": "Node16", + "moduleResolution": "Node16" }, - "include": ["src"], - "references": [{ "path": "../../../auth/tsconfig.build.json" }] + "include": ["."], + "references": [ + { "path": "../../../auth/tsconfig.build.json" }, + { "path": "../../../framework-tools" } + ], + "exclude": ["dist", "node_modules", "**/__mocks__", "**/__tests__/fixtures"] } diff --git a/packages/auth-providers/dbAuth/middleware/build.mts b/packages/auth-providers/dbAuth/middleware/build.mts index f21a64f67a07..4f9ebbf82169 100644 --- a/packages/auth-providers/dbAuth/middleware/build.mts +++ b/packages/auth-providers/dbAuth/middleware/build.mts @@ -1,30 +1,14 @@ -import { build, defaultBuildOptions } from '@redwoodjs/framework-tools' +import { buildExternalCjs, buildExternalEsm } from '@redwoodjs/framework-tools' import { generateTypesCjs, generateTypesEsm, insertCommonJsPackageJson, } from '@redwoodjs/framework-tools/generateTypes' -// ESM build -await build({ - buildOptions: { - ...defaultBuildOptions, - format: 'esm', - packages: 'external', - }, -}) +await buildExternalEsm() await generateTypesEsm() -// CJS build -await build({ - buildOptions: { - ...defaultBuildOptions, - outdir: 'dist/cjs', - packages: 
'external', - }, -}) +await buildExternalCjs() await generateTypesCjs() -await insertCommonJsPackageJson({ - buildFileUrl: import.meta.url, - cjsDir: 'dist/cjs', -}) + +await insertCommonJsPackageJson({ buildFileUrl: import.meta.url }) diff --git a/packages/auth-providers/dbAuth/middleware/package.json b/packages/auth-providers/dbAuth/middleware/package.json index d5e81b6a6828..79a00f41f849 100644 --- a/packages/auth-providers/dbAuth/middleware/package.json +++ b/packages/auth-providers/dbAuth/middleware/package.json @@ -28,7 +28,7 @@ "scripts": { "build": "tsx ./build.mts", "build:pack": "yarn pack -o redwoodjs-auth-dbauth-middleware.tgz", - "build:types": "tsc --build --verbose ./tsconfig.json", + "build:types": "tsc --build --verbose ./tsconfig.build.json", "build:types-cjs": "tsc --build --verbose tsconfig.cjs.json", "check:attw": "yarn attw -P", "check:package": "concurrently npm:check:attw yarn:publint", diff --git a/packages/auth-providers/dbAuth/middleware/src/__tests__/defaultGetRoles.test.ts b/packages/auth-providers/dbAuth/middleware/src/__tests__/defaultGetRoles.test.ts index 04e0d2ca2d3c..99ade190aaa1 100644 --- a/packages/auth-providers/dbAuth/middleware/src/__tests__/defaultGetRoles.test.ts +++ b/packages/auth-providers/dbAuth/middleware/src/__tests__/defaultGetRoles.test.ts @@ -1,6 +1,6 @@ import { describe, expect, it } from 'vitest' -import { defaultGetRoles } from '../defaultGetRoles' +import { defaultGetRoles } from '../defaultGetRoles.js' describe('dbAuth: defaultGetRoles', () => { it('returns an empty array if no roles are present', () => { diff --git a/packages/auth-providers/dbAuth/middleware/src/__tests__/initDbAuthMiddleware.test.ts b/packages/auth-providers/dbAuth/middleware/src/__tests__/initDbAuthMiddleware.test.ts index 7f1a20d1fce7..f973fc1ee9f6 100644 --- a/packages/auth-providers/dbAuth/middleware/src/__tests__/initDbAuthMiddleware.test.ts +++ b/packages/auth-providers/dbAuth/middleware/src/__tests__/initDbAuthMiddleware.test.ts @@ -8,9 +8,10 @@ import { MiddlewareResponse, } from '@redwoodjs/web/middleware' -import { middlewareDefaultAuthProviderState } from '../../../../../auth/dist/AuthProvider/AuthProviderState' -import type { DbAuthMiddlewareOptions } from '../index' -import { initDbAuthMiddleware } from '../index' +import { middlewareDefaultAuthProviderState } from '../../../../../auth/dist/AuthProvider/AuthProviderState.js' +import type { DbAuthMiddlewareOptions } from '../index.js' +import { initDbAuthMiddleware } from '../index.js' + const FIXTURE_PATH = path.resolve( __dirname, '../../../../../../__fixtures__/example-todo-main', @@ -39,6 +40,8 @@ beforeAll(() => { mockedSession: 'this_is_the_only_correct_session', } } + + return undefined }), }, } diff --git a/packages/auth-providers/dbAuth/middleware/src/defaultGetRoles.ts b/packages/auth-providers/dbAuth/middleware/src/defaultGetRoles.ts index 799035680c7c..c589eaadf4fe 100644 --- a/packages/auth-providers/dbAuth/middleware/src/defaultGetRoles.ts +++ b/packages/auth-providers/dbAuth/middleware/src/defaultGetRoles.ts @@ -1,4 +1,6 @@ -export const defaultGetRoles = (decoded: Record): string[] => { +export const defaultGetRoles = ( + decoded: Record | undefined | null, +): string[] => { try { const roles = decoded?.currentUser?.roles diff --git a/packages/auth-providers/dbAuth/middleware/tsconfig.build.json b/packages/auth-providers/dbAuth/middleware/tsconfig.build.json new file mode 100644 index 000000000000..3e4cfa0b360f --- /dev/null +++ 
b/packages/auth-providers/dbAuth/middleware/tsconfig.build.json @@ -0,0 +1,16 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "strict": true, + "rootDir": "src", + "outDir": "dist", + "module": "Node16", + "moduleResolution": "Node16", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src/**/*"], + "references": [ + { "path": "../../../auth/tsconfig.build.json" }, + { "path": "../../../vite/tsconfig.build.json" } + ] +} diff --git a/packages/auth-providers/dbAuth/middleware/tsconfig.cjs.json b/packages/auth-providers/dbAuth/middleware/tsconfig.cjs.json index eaa211040f2f..a660cecf11ff 100644 --- a/packages/auth-providers/dbAuth/middleware/tsconfig.cjs.json +++ b/packages/auth-providers/dbAuth/middleware/tsconfig.cjs.json @@ -1,5 +1,5 @@ { - "extends": "./tsconfig.json", + "extends": "./tsconfig.build.json", "compilerOptions": { "outDir": "dist/cjs", "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo" diff --git a/packages/auth-providers/dbAuth/middleware/tsconfig.json b/packages/auth-providers/dbAuth/middleware/tsconfig.json index 2d457ccfb155..a9e9ebdd342f 100644 --- a/packages/auth-providers/dbAuth/middleware/tsconfig.json +++ b/packages/auth-providers/dbAuth/middleware/tsconfig.json @@ -2,13 +2,12 @@ "extends": "../../../../tsconfig.compilerOption.json", "compilerOptions": { "strict": true, - "rootDir": "src", "outDir": "dist", "module": "Node16", - "moduleResolution": "Node16", - "tsBuildInfoFile": "./tsconfig.tsbuildinfo" + "moduleResolution": "Node16" }, - "include": ["src/**/*"], + "include": ["."], + "exclude": ["dist", "node_modules", "**/__mocks__", "**/__fixtures__"], "references": [ { "path": "../../../auth/tsconfig.build.json" }, { "path": "../../../vite/tsconfig.build.json" } diff --git a/packages/auth-providers/netlify/web/.babelrc.js b/packages/auth-providers/netlify/web/.babelrc.js deleted file mode 100644 index 4312886a07e5..000000000000 --- a/packages/auth-providers/netlify/web/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../../../babel.config.js' } diff --git a/packages/auth-providers/netlify/web/build.ts b/packages/auth-providers/netlify/web/build.ts new file mode 100644 index 000000000000..afc14135d939 --- /dev/null +++ b/packages/auth-providers/netlify/web/build.ts @@ -0,0 +1,17 @@ +import { buildCjs, buildEsm } from '@redwoodjs/framework-tools' +import { + generateTypesCjs, + generateTypesEsm, + insertCommonJsPackageJson, +} from '@redwoodjs/framework-tools/generateTypes' + +// ESM build and type generation +await buildEsm() +await generateTypesEsm() + +// CJS build, type generation, and package.json insert +await buildCjs() +await generateTypesCjs() +await insertCommonJsPackageJson({ + buildFileUrl: import.meta.url, +}) diff --git a/packages/auth-providers/netlify/web/package.json b/packages/auth-providers/netlify/web/package.json index 224050bb1044..cf737416a6ee 100644 --- a/packages/auth-providers/netlify/web/package.json +++ b/packages/auth-providers/netlify/web/package.json @@ -7,32 +7,58 @@ "directory": "packages/auth-providers/netlify/web" }, "license": "MIT", - "main": "./dist/index.js", + "type": "module", + "exports": { + ".": { + "import": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + }, + "default": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./dist/netlify": { + "import": { + "types": "./dist/netlify.d.ts", + "default": "./dist/netlify.js" + }, + "default": { + "types": "./dist/cjs/netlify.d.ts", + 
"default": "./dist/cjs/netlify.js" + } + } + }, + "main": "./dist/cjs/index.js", + "module": "./dist/index.js", "types": "./dist/index.d.ts", "files": [ "dist" ], "scripts": { - "build": "yarn build:js && yarn build:types", - "build:js": "babel src -d dist --extensions \".js,.jsx,.ts,.tsx\" --copy-files --no-copy-ignored", + "build": "tsx ./build.ts", "build:pack": "yarn pack -o redwoodjs-auth-netlify-web.tgz", - "build:types": "tsc --build --verbose", + "build:types": "tsc --build --verbose ./tsconfig.build.json", + "build:types-cjs": "tsc --build --verbose ./tsconfig.cjs.json", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx,template\" --ignore dist --exec \"yarn build\"", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", "prepublishOnly": "NODE_ENV=production yarn build", "test": "vitest run", "test:watch": "vitest watch" }, "dependencies": { - "@babel/runtime-corejs3": "7.25.0", - "@redwoodjs/auth": "workspace:*", - "core-js": "3.38.0" + "@redwoodjs/auth": "workspace:*" }, "devDependencies": { - "@babel/cli": "7.24.8", - "@babel/core": "^7.22.20", + "@redwoodjs/framework-tools": "workspace:*", "@types/netlify-identity-widget": "1.9.6", "@types/react": "^18.2.55", + "concurrently": "8.2.2", + "publint": "0.2.10", "react": "19.0.0-rc-8269d55d-20240802", + "tsx": "4.17.0", "typescript": "5.5.4", "vitest": "2.0.5" }, diff --git a/packages/auth-providers/netlify/web/src/__tests__/netlify.test.tsx b/packages/auth-providers/netlify/web/src/__tests__/netlify.test.tsx index 611e3a3543e2..c5b12ebc5a60 100644 --- a/packages/auth-providers/netlify/web/src/__tests__/netlify.test.tsx +++ b/packages/auth-providers/netlify/web/src/__tests__/netlify.test.tsx @@ -4,7 +4,7 @@ import { vi, expect, it, beforeAll, beforeEach, describe } from 'vitest' import type { CurrentUser } from '@redwoodjs/auth' -import { createAuth } from '../netlify' +import { createAuth } from '../netlify.js' type NetlifyIdentity = typeof NetlifyIdentityNS type User = NetlifyIdentityNS.User diff --git a/packages/auth-providers/netlify/web/src/index.ts b/packages/auth-providers/netlify/web/src/index.ts index ff6d45586e48..de207efcdd47 100644 --- a/packages/auth-providers/netlify/web/src/index.ts +++ b/packages/auth-providers/netlify/web/src/index.ts @@ -1 +1 @@ -export { createAuth } from './netlify' +export { createAuth } from './netlify.js' diff --git a/packages/auth-providers/netlify/web/tsconfig.build.json b/packages/auth-providers/netlify/web/tsconfig.build.json new file mode 100644 index 000000000000..e7e0fedfaba0 --- /dev/null +++ b/packages/auth-providers/netlify/web/tsconfig.build.json @@ -0,0 +1,15 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "strict": true, + "rootDir": "src", + "outDir": "dist", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"], + "references": [ + { + "path": "./../../../auth/tsconfig.build.json" + } + ] +} diff --git a/packages/auth-providers/netlify/web/tsconfig.cjs.json b/packages/auth-providers/netlify/web/tsconfig.cjs.json new file mode 100644 index 000000000000..a660cecf11ff --- /dev/null +++ b/packages/auth-providers/netlify/web/tsconfig.cjs.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.build.json", + "compilerOptions": { + "outDir": "dist/cjs", + "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo" + } +} diff --git a/packages/auth-providers/netlify/web/tsconfig.json b/packages/auth-providers/netlify/web/tsconfig.json index 6f1a0dfbbe25..a31dd6f9faaa 100644 --- 
a/packages/auth-providers/netlify/web/tsconfig.json +++ b/packages/auth-providers/netlify/web/tsconfig.json @@ -2,9 +2,13 @@ "extends": "../../../../tsconfig.compilerOption.json", "compilerOptions": { "strict": true, - "rootDir": "src", - "outDir": "dist" + "module": "Node16", + "moduleResolution": "Node16" }, - "include": ["src"], - "references": [{ "path": "../../../auth/tsconfig.build.json" }] + "include": ["."], + "references": [ + { "path": "../../../auth/tsconfig.build.json" }, + { "path": "../../../framework-tools" } + ], + "exclude": ["dist", "node_modules", "**/__mocks__", "**/__tests__/fixtures"] } diff --git a/packages/auth-providers/supabase/middleware/build.mts b/packages/auth-providers/supabase/middleware/build.mts index f21a64f67a07..4f9ebbf82169 100644 --- a/packages/auth-providers/supabase/middleware/build.mts +++ b/packages/auth-providers/supabase/middleware/build.mts @@ -1,30 +1,14 @@ -import { build, defaultBuildOptions } from '@redwoodjs/framework-tools' +import { buildExternalCjs, buildExternalEsm } from '@redwoodjs/framework-tools' import { generateTypesCjs, generateTypesEsm, insertCommonJsPackageJson, } from '@redwoodjs/framework-tools/generateTypes' -// ESM build -await build({ - buildOptions: { - ...defaultBuildOptions, - format: 'esm', - packages: 'external', - }, -}) +await buildExternalEsm() await generateTypesEsm() -// CJS build -await build({ - buildOptions: { - ...defaultBuildOptions, - outdir: 'dist/cjs', - packages: 'external', - }, -}) +await buildExternalCjs() await generateTypesCjs() -await insertCommonJsPackageJson({ - buildFileUrl: import.meta.url, - cjsDir: 'dist/cjs', -}) + +await insertCommonJsPackageJson({ buildFileUrl: import.meta.url }) diff --git a/packages/auth-providers/supabase/middleware/package.json b/packages/auth-providers/supabase/middleware/package.json index 82d29f7ee049..fb799ea6c586 100644 --- a/packages/auth-providers/supabase/middleware/package.json +++ b/packages/auth-providers/supabase/middleware/package.json @@ -29,7 +29,7 @@ "scripts": { "build": "tsx ./build.mts", "build:pack": "yarn pack -o redwoodjs-auth-supabase-middleware.tgz", - "build:types": "tsc --build --verbose ./tsconfig.json", + "build:types": "tsc --build --verbose ./tsconfig.build.json", "build:types-cjs": "tsc --build --verbose ./tsconfig.cjs.json", "check:attw": "yarn attw -P", "check:package": "concurrently npm:check:attw yarn:publint", diff --git a/packages/auth-providers/supabase/middleware/src/__tests__/defaultGetRoles.test.ts b/packages/auth-providers/supabase/middleware/src/__tests__/defaultGetRoles.test.ts index 5621e5d6ee61..c7b25c4c1901 100644 --- a/packages/auth-providers/supabase/middleware/src/__tests__/defaultGetRoles.test.ts +++ b/packages/auth-providers/supabase/middleware/src/__tests__/defaultGetRoles.test.ts @@ -1,6 +1,6 @@ import { describe, expect, it } from 'vitest' -import { defaultGetRoles } from '../defaultGetRoles' +import { defaultGetRoles } from '../defaultGetRoles.js' describe('dbAuth: defaultGetRoles', () => { it('returns an empty array if no roles are present', () => { diff --git a/packages/auth-providers/supabase/middleware/src/__tests__/initSupabaseAuthMiddleware.test.ts b/packages/auth-providers/supabase/middleware/src/__tests__/initSupabaseAuthMiddleware.test.ts index b4d5027ce586..3a16e8625db8 100644 --- a/packages/auth-providers/supabase/middleware/src/__tests__/initSupabaseAuthMiddleware.test.ts +++ b/packages/auth-providers/supabase/middleware/src/__tests__/initSupabaseAuthMiddleware.test.ts @@ -10,8 +10,9 @@ import { 
MiddlewareResponse, } from '@redwoodjs/web/middleware' -import initSupabaseAuthMiddleware from '../index' -import type { SupabaseAuthMiddlewareOptions } from '../index' +import initSupabaseAuthMiddleware from '../index.js' +import type { SupabaseAuthMiddlewareOptions } from '../index.js' + const FIXTURE_PATH = path.resolve( __dirname, '../../../../../../__fixtures__/example-todo-main', diff --git a/packages/auth-providers/supabase/middleware/src/defaultGetRoles.ts b/packages/auth-providers/supabase/middleware/src/defaultGetRoles.ts index 23873da4bf6b..3cbf5a4628ef 100644 --- a/packages/auth-providers/supabase/middleware/src/defaultGetRoles.ts +++ b/packages/auth-providers/supabase/middleware/src/defaultGetRoles.ts @@ -33,7 +33,8 @@ interface PartialSupabaseDecoded { app_metadata: { - roles?: string + [key: string]: unknown + roles?: string | undefined } } diff --git a/packages/auth-providers/supabase/middleware/tsconfig.build.json b/packages/auth-providers/supabase/middleware/tsconfig.build.json new file mode 100644 index 000000000000..3e4cfa0b360f --- /dev/null +++ b/packages/auth-providers/supabase/middleware/tsconfig.build.json @@ -0,0 +1,16 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "strict": true, + "rootDir": "src", + "outDir": "dist", + "module": "Node16", + "moduleResolution": "Node16", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src/**/*"], + "references": [ + { "path": "../../../auth/tsconfig.build.json" }, + { "path": "../../../vite/tsconfig.build.json" } + ] +} diff --git a/packages/auth-providers/supabase/middleware/tsconfig.cjs.json b/packages/auth-providers/supabase/middleware/tsconfig.cjs.json index eaa211040f2f..a660cecf11ff 100644 --- a/packages/auth-providers/supabase/middleware/tsconfig.cjs.json +++ b/packages/auth-providers/supabase/middleware/tsconfig.cjs.json @@ -1,5 +1,5 @@ { - "extends": "./tsconfig.json", + "extends": "./tsconfig.build.json", "compilerOptions": { "outDir": "dist/cjs", "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo" diff --git a/packages/auth-providers/supabase/middleware/tsconfig.json b/packages/auth-providers/supabase/middleware/tsconfig.json index 2d457ccfb155..a9e9ebdd342f 100644 --- a/packages/auth-providers/supabase/middleware/tsconfig.json +++ b/packages/auth-providers/supabase/middleware/tsconfig.json @@ -2,13 +2,12 @@ "extends": "../../../../tsconfig.compilerOption.json", "compilerOptions": { "strict": true, - "rootDir": "src", "outDir": "dist", "module": "Node16", - "moduleResolution": "Node16", - "tsBuildInfoFile": "./tsconfig.tsbuildinfo" + "moduleResolution": "Node16" }, - "include": ["src/**/*"], + "include": ["."], + "exclude": ["dist", "node_modules", "**/__mocks__", "**/__fixtures__"], "references": [ { "path": "../../../auth/tsconfig.build.json" }, { "path": "../../../vite/tsconfig.build.json" } diff --git a/packages/auth-providers/supabase/web/.babelrc.js b/packages/auth-providers/supabase/web/.babelrc.js deleted file mode 100644 index 4312886a07e5..000000000000 --- a/packages/auth-providers/supabase/web/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../../../babel.config.js' } diff --git a/packages/auth-providers/supabase/web/build.ts b/packages/auth-providers/supabase/web/build.ts new file mode 100644 index 000000000000..afc14135d939 --- /dev/null +++ b/packages/auth-providers/supabase/web/build.ts @@ -0,0 +1,17 @@ +import { buildCjs, buildEsm } from '@redwoodjs/framework-tools' +import { + generateTypesCjs, + generateTypesEsm, + 
insertCommonJsPackageJson, +} from '@redwoodjs/framework-tools/generateTypes' + +// ESM build and type generation +await buildEsm() +await generateTypesEsm() + +// CJS build, type generation, and package.json insert +await buildCjs() +await generateTypesCjs() +await insertCommonJsPackageJson({ + buildFileUrl: import.meta.url, +}) diff --git a/packages/auth-providers/supabase/web/package.json b/packages/auth-providers/supabase/web/package.json index 05891bb87721..652d89e05b53 100644 --- a/packages/auth-providers/supabase/web/package.json +++ b/packages/auth-providers/supabase/web/package.json @@ -7,33 +7,59 @@ "directory": "packages/auth-providers/supabase/web" }, "license": "MIT", - "main": "./dist/index.js", + "type": "module", + "exports": { + ".": { + "import": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + }, + "default": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./dist/supabase": { + "import": { + "types": "./dist/supabase.d.ts", + "default": "./dist/supabase.js" + }, + "default": { + "types": "./dist/cjs/supabase.d.ts", + "default": "./dist/cjs/supabase.js" + } + } + }, + "main": "./dist/cjs/index.js", + "module": "./dist/index.js", "types": "./dist/index.d.ts", "files": [ "dist" ], "scripts": { - "build": "yarn build:js && yarn build:types", - "build:js": "babel src -d dist --extensions \".js,.jsx,.ts,.tsx\" --copy-files --no-copy-ignored", + "build": "tsx ./build.ts", "build:pack": "yarn pack -o redwoodjs-auth-supabase-web.tgz", - "build:types": "tsc --build --verbose", + "build:types": "tsc --build --verbose ./tsconfig.build.json", + "build:types-cjs": "tsc --build --verbose ./tsconfig.cjs.json", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx,template\" --ignore dist --exec \"yarn build\"", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", "prepublishOnly": "NODE_ENV=production yarn build", "test": "vitest run", "test:watch": "vitest watch" }, "dependencies": { - "@babel/runtime-corejs3": "7.25.0", - "@redwoodjs/auth": "workspace:*", - "core-js": "3.38.0" + "@redwoodjs/auth": "workspace:*" }, "devDependencies": { - "@babel/cli": "7.24.8", - "@babel/core": "^7.22.20", + "@redwoodjs/framework-tools": "workspace:*", "@supabase/ssr": "0.4.0", "@supabase/supabase-js": "2.45.1", "@types/react": "^18.2.55", + "concurrently": "8.2.2", + "publint": "0.2.10", "react": "19.0.0-rc-8269d55d-20240802", + "tsx": "4.17.0", "typescript": "5.5.4", "vitest": "2.0.5" }, diff --git a/packages/auth-providers/supabase/web/src/__tests__/supabase.middleware.test.tsx b/packages/auth-providers/supabase/web/src/__tests__/supabase.middleware.test.tsx index 4bb7cc617f35..e9cebc220e2c 100644 --- a/packages/auth-providers/supabase/web/src/__tests__/supabase.middleware.test.tsx +++ b/packages/auth-providers/supabase/web/src/__tests__/supabase.middleware.test.tsx @@ -5,13 +5,13 @@ import { vi, it, describe, beforeAll, beforeEach, expect } from 'vitest' import type { CurrentUser } from '@redwoodjs/auth' -import { createAuth } from '../supabase' +import { createAuth } from '../supabase.js' import { mockSupabaseAuthClient, loggedInUser, adminUser, -} from './mockSupabaseAuthClient' +} from './mockSupabaseAuthClient.js' const supabaseMockClient = { auth: mockSupabaseAuthClient, diff --git a/packages/auth-providers/supabase/web/src/__tests__/supabase.test.tsx b/packages/auth-providers/supabase/web/src/__tests__/supabase.test.tsx index bedae632e3b3..b2b1ebcacadc 100644 --- 
a/packages/auth-providers/supabase/web/src/__tests__/supabase.test.tsx +++ b/packages/auth-providers/supabase/web/src/__tests__/supabase.test.tsx @@ -4,13 +4,13 @@ import { vi, it, describe, beforeAll, beforeEach, expect } from 'vitest' import type { CurrentUser } from '@redwoodjs/auth' -import { createAuth } from '../supabase' +import { createAuth } from '../supabase.js' import { mockSupabaseAuthClient, loggedInUser, adminUser, -} from './mockSupabaseAuthClient' +} from './mockSupabaseAuthClient.js' const supabaseMockClient = { auth: mockSupabaseAuthClient, diff --git a/packages/auth-providers/supabase/web/src/index.ts b/packages/auth-providers/supabase/web/src/index.ts index 3c69819e3661..ce5f8bbbabc5 100644 --- a/packages/auth-providers/supabase/web/src/index.ts +++ b/packages/auth-providers/supabase/web/src/index.ts @@ -1 +1 @@ -export { createAuth } from './supabase' +export { createAuth } from './supabase.js' diff --git a/packages/auth-providers/supabase/web/tsconfig.build.json b/packages/auth-providers/supabase/web/tsconfig.build.json new file mode 100644 index 000000000000..f6d6e2f70d82 --- /dev/null +++ b/packages/auth-providers/supabase/web/tsconfig.build.json @@ -0,0 +1,15 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "strict": true, + "rootDir": "src", + "outDir": "dist", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src", "./ambient.d.ts"], + "references": [ + { + "path": "./../../../auth/tsconfig.build.json" + } + ] +} diff --git a/packages/auth-providers/supabase/web/tsconfig.cjs.json b/packages/auth-providers/supabase/web/tsconfig.cjs.json new file mode 100644 index 000000000000..a660cecf11ff --- /dev/null +++ b/packages/auth-providers/supabase/web/tsconfig.cjs.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.build.json", + "compilerOptions": { + "outDir": "dist/cjs", + "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo" + } +} diff --git a/packages/auth-providers/supabase/web/tsconfig.json b/packages/auth-providers/supabase/web/tsconfig.json index 44a4bf6185e8..a31dd6f9faaa 100644 --- a/packages/auth-providers/supabase/web/tsconfig.json +++ b/packages/auth-providers/supabase/web/tsconfig.json @@ -2,9 +2,13 @@ "extends": "../../../../tsconfig.compilerOption.json", "compilerOptions": { "strict": true, - "rootDir": "src", - "outDir": "dist" + "module": "Node16", + "moduleResolution": "Node16" }, - "include": ["src", "./ambient.d.ts"], - "references": [{ "path": "../../../auth/tsconfig.build.json" }] + "include": ["."], + "references": [ + { "path": "../../../auth/tsconfig.build.json" }, + { "path": "../../../framework-tools" } + ], + "exclude": ["dist", "node_modules", "**/__mocks__", "**/__tests__/fixtures"] } diff --git a/packages/auth-providers/supertokens/web/.babelrc.js b/packages/auth-providers/supertokens/web/.babelrc.js deleted file mode 100644 index 4312886a07e5..000000000000 --- a/packages/auth-providers/supertokens/web/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../../../babel.config.js' } diff --git a/packages/auth-providers/supertokens/web/build.ts b/packages/auth-providers/supertokens/web/build.ts new file mode 100644 index 000000000000..afc14135d939 --- /dev/null +++ b/packages/auth-providers/supertokens/web/build.ts @@ -0,0 +1,17 @@ +import { buildCjs, buildEsm } from '@redwoodjs/framework-tools' +import { + generateTypesCjs, + generateTypesEsm, + insertCommonJsPackageJson, +} from '@redwoodjs/framework-tools/generateTypes' + +// ESM build and type generation +await 
buildEsm() +await generateTypesEsm() + +// CJS build, type generation, and package.json insert +await buildCjs() +await generateTypesCjs() +await insertCommonJsPackageJson({ + buildFileUrl: import.meta.url, +}) diff --git a/packages/auth-providers/supertokens/web/package.json b/packages/auth-providers/supertokens/web/package.json index 91d4a5d1781c..86626b8cb3e4 100644 --- a/packages/auth-providers/supertokens/web/package.json +++ b/packages/auth-providers/supertokens/web/package.json @@ -7,32 +7,58 @@ "directory": "packages/auth-providers/supertokens/web" }, "license": "MIT", - "main": "./dist/index.js", + "type": "module", + "exports": { + ".": { + "import": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + }, + "default": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./dist/supertokens": { + "import": { + "types": "./dist/supertokens.d.ts", + "default": "./dist/supertokens.js" + }, + "default": { + "types": "./dist/cjs/supertokens.d.ts", + "default": "./dist/cjs/supertokens.js" + } + } + }, + "main": "./dist/cjs/index.js", + "module": "./dist/index.js", "types": "./dist/index.d.ts", "files": [ "dist" ], "scripts": { - "build": "yarn build:js && yarn build:types", - "build:js": "babel src -d dist --extensions \".js,.jsx,.ts,.tsx\" --copy-files --no-copy-ignored", + "build": "tsx ./build.ts", "build:pack": "yarn pack -o redwoodjs-auth-supertokens-web.tgz", - "build:types": "tsc --build --verbose", + "build:types": "tsc --build --verbose ./tsconfig.build.json", + "build:types-cjs": "tsc --build --verbose ./tsconfig.cjs.json", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx,template\" --ignore dist --exec \"yarn build\"", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", "prepublishOnly": "NODE_ENV=production yarn build", "test": "vitest run", "test:watch": "vitest watch" }, "dependencies": { - "@babel/runtime-corejs3": "7.25.0", - "@redwoodjs/auth": "workspace:*", - "core-js": "3.38.0" + "@redwoodjs/auth": "workspace:*" }, "devDependencies": { - "@babel/cli": "7.24.8", - "@babel/core": "^7.22.20", + "@redwoodjs/framework-tools": "workspace:*", "@types/react": "^18.2.55", + "concurrently": "8.2.2", + "publint": "0.2.10", "react": "19.0.0-rc-8269d55d-20240802", "supertokens-auth-react": "0.39.1", + "tsx": "4.17.0", "typescript": "5.5.4", "vitest": "2.0.5" }, diff --git a/packages/auth-providers/supertokens/web/src/__tests__/supertokens.test.tsx b/packages/auth-providers/supertokens/web/src/__tests__/supertokens.test.tsx index 556f0ed3ddf9..5e6e3cde43b0 100644 --- a/packages/auth-providers/supertokens/web/src/__tests__/supertokens.test.tsx +++ b/packages/auth-providers/supertokens/web/src/__tests__/supertokens.test.tsx @@ -7,8 +7,8 @@ import type { SuperTokensUser, SessionRecipe, SuperTokensAuth, -} from '../supertokens' -import { createAuth } from '../supertokens' +} from '../supertokens.js' +import { createAuth } from '../supertokens.js' const user: SuperTokensUser = { userId: 'unique_user_id', diff --git a/packages/auth-providers/supertokens/web/src/index.ts b/packages/auth-providers/supertokens/web/src/index.ts index f38ac2769dac..2fb39f5ed586 100644 --- a/packages/auth-providers/supertokens/web/src/index.ts +++ b/packages/auth-providers/supertokens/web/src/index.ts @@ -1 +1 @@ -export { createAuth } from './supertokens' +export { createAuth } from './supertokens.js' diff --git a/packages/auth-providers/supertokens/web/tsconfig.build.json 
b/packages/auth-providers/supertokens/web/tsconfig.build.json new file mode 100644 index 000000000000..e7e0fedfaba0 --- /dev/null +++ b/packages/auth-providers/supertokens/web/tsconfig.build.json @@ -0,0 +1,15 @@ +{ + "extends": "../../../../tsconfig.compilerOption.json", + "compilerOptions": { + "strict": true, + "rootDir": "src", + "outDir": "dist", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"], + "references": [ + { + "path": "./../../../auth/tsconfig.build.json" + } + ] +} diff --git a/packages/auth-providers/supertokens/web/tsconfig.cjs.json b/packages/auth-providers/supertokens/web/tsconfig.cjs.json new file mode 100644 index 000000000000..a660cecf11ff --- /dev/null +++ b/packages/auth-providers/supertokens/web/tsconfig.cjs.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.build.json", + "compilerOptions": { + "outDir": "dist/cjs", + "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo" + } +} diff --git a/packages/auth-providers/supertokens/web/tsconfig.json b/packages/auth-providers/supertokens/web/tsconfig.json index 6f1a0dfbbe25..a31dd6f9faaa 100644 --- a/packages/auth-providers/supertokens/web/tsconfig.json +++ b/packages/auth-providers/supertokens/web/tsconfig.json @@ -2,9 +2,13 @@ "extends": "../../../../tsconfig.compilerOption.json", "compilerOptions": { "strict": true, - "rootDir": "src", - "outDir": "dist" + "module": "Node16", + "moduleResolution": "Node16" }, - "include": ["src"], - "references": [{ "path": "../../../auth/tsconfig.build.json" }] + "include": ["."], + "references": [ + { "path": "../../../auth/tsconfig.build.json" }, + { "path": "../../../framework-tools" } + ], + "exclude": ["dist", "node_modules", "**/__mocks__", "**/__tests__/fixtures"] } diff --git a/packages/auth/package.json b/packages/auth/package.json index 47adba1abad0..0b2be074ac1f 100644 --- a/packages/auth/package.json +++ b/packages/auth/package.json @@ -78,7 +78,7 @@ "build": "tsx ./build.ts && yarn build:types", "build:pack": "yarn pack -o redwoodjs-auth.tgz", "build:types": "tsc --build --verbose tsconfig.build.json", - "build:types-cjs": "tsc --build --verbose tsconfig.types-cjs.json", + "build:types-cjs": "tsc --build --verbose tsconfig.cjs.json", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "check:attw": "tsx ./attw.ts", "check:package": "concurrently npm:check:attw yarn:publint", diff --git a/packages/auth/tsconfig.cjs.json b/packages/auth/tsconfig.cjs.json new file mode 100644 index 000000000000..a660cecf11ff --- /dev/null +++ b/packages/auth/tsconfig.cjs.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.build.json", + "compilerOptions": { + "outDir": "dist/cjs", + "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo" + } +} diff --git a/packages/babel-config/src/api.ts b/packages/babel-config/src/api.ts index fe2072b17e3f..0ca53baf8629 100644 --- a/packages/babel-config/src/api.ts +++ b/packages/babel-config/src/api.ts @@ -20,6 +20,7 @@ import pluginRedwoodContextWrapping from './plugins/babel-plugin-redwood-context import pluginRedwoodDirectoryNamedImport from './plugins/babel-plugin-redwood-directory-named-import' import pluginRedwoodGraphqlOptionsExtract from './plugins/babel-plugin-redwood-graphql-options-extract' import pluginRedwoodImportDir from './plugins/babel-plugin-redwood-import-dir' +import pluginRedwoodJobPathInjector from './plugins/babel-plugin-redwood-job-path-injector' import pluginRedwoodOTelWrapping from './plugins/babel-plugin-redwood-otel-wrapping' export const TARGETS_NODE = '20.10' @@ -178,6 
+179,12 @@ export const getApiSideBabelOverrides = ({ projectIsEsm = false } = {}) => { ], ], }, + // Add import names and paths to job definitions + { + // match */api/src/jobs/*.js|ts + test: /.+api(?:[\\|/])src(?:[\\|/])jobs(?:[\\|/]).+.(?:js|ts)$/, + plugins: [[pluginRedwoodJobPathInjector]], + }, ].filter(Boolean) return overrides as TransformOptions[] } diff --git a/packages/babel-config/src/plugins/babel-plugin-redwood-job-path-injector.ts b/packages/babel-config/src/plugins/babel-plugin-redwood-job-path-injector.ts new file mode 100644 index 000000000000..1af811227442 --- /dev/null +++ b/packages/babel-config/src/plugins/babel-plugin-redwood-job-path-injector.ts @@ -0,0 +1,110 @@ +import fsPath from 'node:path' + +import type { PluginObj, types } from '@babel/core' + +import { getPaths } from '@redwoodjs/project-config' + +// This plugin is responsible for injecting the import path and name of a job +// into the object that is passed to createJob. This is later used by adapters +// and workers to import the job. + +export default function ({ types: _t }: { types: typeof types }): PluginObj { + const paths = getPaths() + return { + name: 'babel-plugin-redwood-job-path-injector', + visitor: { + ExportNamedDeclaration(path, state) { + // Extract the variable declaration from the export + const declaration = path.node.declaration + if (!declaration) { + return + } + if (declaration.type !== 'VariableDeclaration') { + return + } + // Extract the variable declarator from the declaration + const declarator = declaration.declarations[0] + if (!declarator) { + return + } + if (declarator.type !== 'VariableDeclarator') { + return + } + + // Confirm that the init is a call expression + const init = declarator.init + if (!init) { + return + } + if (init.type !== 'CallExpression') { + return + } + // Confirm that the callee is a member expression + const callee = init.callee + if (!callee) { + return + } + if (callee.type !== 'MemberExpression') { + return + } + // The object is imported and so could be aliased so let's check the property + const property = callee.property + if (!property) { + return + } + if (property.type !== 'Identifier') { + return + } + if (property.name !== 'createJob') { + return + } + + // From this point on we're confident that we're looking at a createJob call + // so let's start throwing errors if we don't find what we expect + + // Extract the variable name from the declarator + const id = declarator.id + if (!id) { + return + } + if (id.type !== 'Identifier') { + return + } + + const filepath = state.file.opts.filename + if (!filepath) { + throw new Error('No file path was found in the state') + } + + const importName = id.name + const importPath = fsPath.relative(paths.api.jobs, filepath) + const importPathWithoutExtension = importPath.replace(/\.[^/.]+$/, '') + + // Get the first argument of the call expression + const firstArg = init.arguments[0] + if (!firstArg) { + throw new Error('No first argument found in the createJob call') + } + // confirm it's an object expression + if (firstArg.type !== 'ObjectExpression') { + throw new Error( + 'The first argument of the createJob call is not an object expression', + ) + } + // Add a property to the object expression + firstArg.properties.push( + _t.objectProperty( + _t.identifier('path'), + _t.stringLiteral(importPathWithoutExtension), + ), + ) + firstArg.properties.push( + _t.objectProperty( + _t.identifier('name'), + _t.stringLiteral(importName), + ), + ) + }, + }, + } +}
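(For context, here is a sketch of the rewrite this plugin performs on a job file. The `SampleEmailJob` file below is hypothetical; the injected `path` is the job file's location relative to api/src/jobs with the extension stripped, and `name` is the exported identifier, following the visitor logic above.)

```ts
// api/src/jobs/SampleEmailJob/SampleEmailJob.ts — as the developer writes it
export const SampleEmailJob = jobs.createJob({
  queue: 'default',
  perform: async (email: string) => {
    // send the email...
  },
})

// After the plugin runs (sketch): the object passed to createJob gains the
// `path` and `name` properties that adapters and workers later use to
// import the job.
export const SampleEmailJob = jobs.createJob({
  queue: 'default',
  perform: async (email: string) => {
    // send the email...
  },
  path: 'SampleEmailJob/SampleEmailJob',
  name: 'SampleEmailJob',
})
```

diff --git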
a/packages/cli-helpers/package.json b/packages/cli-helpers/package.json index 974fbf958b4d..250b1e9f585f 100644 --- a/packages/cli-helpers/package.json +++ b/packages/cli-helpers/package.json @@ -47,7 +47,7 @@ "scripts": { "build": "tsx ./build.ts && yarn build:types", "build:pack": "yarn pack -o redwoodjs-cli-helpers.tgz", - "build:types": "tsc --build --verbose ./tsconfig.build.json ./tsconfig.build.cjs.json", + "build:types": "tsc --build --verbose ./tsconfig.build.json ./tsconfig.cjs.json", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "prepublishOnly": "NODE_ENV=production yarn build", "test": "vitest run", diff --git a/packages/cli-helpers/src/lib/colors.ts b/packages/cli-helpers/src/lib/colors.ts index b584b75baca1..f2c32abafa9b 100644 --- a/packages/cli-helpers/src/lib/colors.ts +++ b/packages/cli-helpers/src/lib/colors.ts @@ -18,5 +18,5 @@ export const colors = { tip: chalk.green, important: chalk.magenta, caution: chalk.red, - link: chalk.hex('#e8e8e8'), + link: chalk.underline, } diff --git a/packages/cli-helpers/tsconfig.build.cjs.json b/packages/cli-helpers/tsconfig.cjs.json similarity index 100% rename from packages/cli-helpers/tsconfig.build.cjs.json rename to packages/cli-helpers/tsconfig.cjs.json diff --git a/packages/cli/src/commands/generate/job/__tests__/__snapshots__/job.test.ts.snap b/packages/cli/src/commands/generate/job/__tests__/__snapshots__/job.test.ts.snap new file mode 100644 index 000000000000..5df426fc7edb --- /dev/null +++ b/packages/cli/src/commands/generate/job/__tests__/__snapshots__/job.test.ts.snap @@ -0,0 +1,48 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`Single word default files > creates a single word job file > Scenario snapshot 1`] = ` +"import type { ScenarioData } from '@redwoodjs/testing/api' + +export const standard = defineScenario({ + // Define the "fixture" to write into your test database here + // See guide: https://redwoodjs.com/docs/testing#scenarios +}) + +export type StandardScenario = ScenarioData +" +`; + +exports[`Single word default files > creates a single word job file > Test snapshot 1`] = ` +"import { SampleJob } from './SampleJob' + +describe('SampleJob', () => { + it('should not throw any errors', async () => { + await expect(SampleJob.perform()).resolves.not.toThrow() + }) +}) +" +`; + +exports[`Single word default files > creates a single word job file 1`] = ` +"import { jobs } from 'src/lib/jobs' + +export const SampleJob = jobs.createJob({ + queue: 'default', + perform: async () => { + jobs.logger.info('SampleJob is performing...') + }, +}) +" +`; + +exports[`multi-word files > creates a multi word job file 1`] = ` +"import { jobs } from 'src/lib/jobs' + +export const SendMailJob = jobs.createJob({ + queue: 'default', + perform: async () => { + jobs.logger.info('SendMailJob is performing...') + }, +}) +" +`; diff --git a/packages/cli/src/commands/generate/job/__tests__/job.test.ts b/packages/cli/src/commands/generate/job/__tests__/job.test.ts new file mode 100644 index 000000000000..fbec73c53610 --- /dev/null +++ b/packages/cli/src/commands/generate/job/__tests__/job.test.ts @@ -0,0 +1,86 @@ +globalThis.__dirname = __dirname +// Load shared mocks +import '../../../../lib/test' + +import path from 'path' + +import { describe, it, expect } from 'vitest' + +import * as jobGenerator from '../job' + +// Should be refactored as it's repeated +type WordFilesType = { [key: string]: string } + +describe('Single word default files', async () => { + const files: WordFilesType = await jobGenerator.files({ + name: 'Sample', + queueName: 'default', + tests: true, + typescript: true, + }) + + it('creates a single word job file', () => { + expect( + files[ + path.normalize('/path/to/project/api/src/jobs/SampleJob/SampleJob.ts') + ], + ).toMatchSnapshot() + + expect( + files[ + path.normalize( + '/path/to/project/api/src/jobs/SampleJob/SampleJob.test.ts', + ) + ], + ).toMatchSnapshot('Test snapshot') + + expect( + files[ + path.normalize( + '/path/to/project/api/src/jobs/SampleJob/SampleJob.scenarios.ts', + ) + ], + ).toMatchSnapshot('Scenario snapshot') + }) +}) + +describe('multi-word files', () => { + it('creates a multi word job file', async () => { + const multiWordDefaultFiles = await jobGenerator.files({ + name: 'send-mail', + queueName: 'default', + tests: false, + typescript: true, + }) + + expect( + multiWordDefaultFiles[ + path.normalize( + '/path/to/project/api/src/jobs/SendMailJob/SendMailJob.ts', + ) + ], + ).toMatchSnapshot() + }) +}) + +describe('generation of js files', async () => { + const jsFiles: WordFilesType = await jobGenerator.files({ + name: 'Sample', + queueName: 'default', + tests: true, + typescript: false, + }) + + it('returns tests, scenario and job file for JS', () => { + const fileNames = Object.keys(jsFiles) + expect(fileNames.length).toEqual(3) + + expect(fileNames).toEqual( + expect.arrayContaining([ + expect.stringContaining('SampleJob.js'), + expect.stringContaining('SampleJob.test.js'), + expect.stringContaining('SampleJob.scenarios.js'), + ]), + ) + }) +}) diff --git a/packages/cli/src/commands/generate/job/job.js b/packages/cli/src/commands/generate/job/job.js new file mode 100644 index 000000000000..fd1c30952a48 --- /dev/null +++ b/packages/cli/src/commands/generate/job/job.js @@ -0,0 +1,204 @@ +import path from 'node:path' +import { pathToFileURL } from 'node:url' + +import * as changeCase from 'change-case' +import execa from 'execa' +import { Listr } from 'listr2' +import terminalLink from 'terminal-link' + +import { recordTelemetryAttributes } from '@redwoodjs/cli-helpers' +import { errorTelemetry } from '@redwoodjs/telemetry' + +import { getPaths, transformTSToJS, writeFilesTask } from '../../../lib' +import c from '../../../lib/colors' +import { isTypeScriptProject } from '../../../lib/project' +import { prepareForRollback } from '../../../lib/rollback' +import { yargsDefaults } from '../helpers' +import { validateName, templateForComponentFile } from '../helpers' + +// Makes sure the name ends up looking like: `WelcomeNotice` even if the user +// called it `welcome-notice` or `welcomeNoticeJob` or anything else +const normalizeName = (name) => { + return changeCase.pascalCase(name).replace(/Job$/, '') +} + +export const files = async ({ + name, + queueName, + typescript: generateTypescript, + tests: generateTests = true, + ...rest +}) => { + const extension = generateTypescript ?
'.ts' : '.js' + + const outputFiles = [] + + const jobName = normalizeName(name) + + const jobFiles = await templateForComponentFile({ + name: jobName, + componentName: jobName, + extension, + apiPathSection: 'jobs', + generator: 'job', + templatePath: 'job.ts.template', + templateVars: { name: jobName, queueName, ...rest }, + outputPath: path.join( + getPaths().api.jobs, + `${jobName}Job`, + `${jobName}Job${extension}`, + ), + }) + + outputFiles.push(jobFiles) + + if (generateTests) { + const testFile = await templateForComponentFile({ + name: jobName, + componentName: jobName, + extension, + apiPathSection: 'jobs', + generator: 'job', + templatePath: 'test.ts.template', + templateVars: { ...rest }, + outputPath: path.join( + getPaths().api.jobs, + `${jobName}Job`, + `${jobName}Job.test${extension}`, + ), + }) + + const scenarioFile = await templateForComponentFile({ + name: jobName, + componentName: jobName, + extension, + apiPathSection: 'jobs', + generator: 'job', + templatePath: 'scenarios.ts.template', + templateVars: { ...rest }, + outputPath: path.join( + getPaths().api.jobs, + `${jobName}Job`, + `${jobName}Job.scenarios${extension}`, + ), + }) + + outputFiles.push(testFile) + outputFiles.push(scenarioFile) + } + + return outputFiles.reduce(async (accP, [outputPath, content]) => { + const acc = await accP + + const template = generateTypescript + ? content + : await transformTSToJS(outputPath, content) + + return { + [outputPath]: template, + ...acc, + } + }, Promise.resolve({})) +} + +export const command = 'job <name>' +export const description = 'Generate a Background Job' + +// This could be built using createYargsForComponentGeneration; +// however, jobs shouldn't have a `stories` option. createYargs... +// should be reversed to provide `yargsDefaults` as the default configuration +// and accept a configuration such as its current default to append onto a command. +export const builder = (yargs) => { + yargs + .positional('name', { + description: 'Name of the Job', + type: 'string', + }) + .option('typescript', { + alias: 'ts', + description: 'Generate TypeScript files', + type: 'boolean', + default: isTypeScriptProject(), + }) + .option('tests', { + description: 'Generate test files', + type: 'boolean', + default: true, + }) + .option('rollback', { + description: 'Revert all generator actions if an error occurs', + type: 'boolean', + default: true, + }) + .epilogue( + `Also see the ${terminalLink( + 'Redwood CLI Reference', + 'https://redwoodjs.com/docs/cli-commands#generate-job', + )}`, + ) + + // Add default options, includes '--typescript', '--javascript', '--force', ... + Object.entries(yargsDefaults).forEach(([option, config]) => { + yargs.option(option, config) + }) +} + +// This could be built using createYargsForComponentGeneration; +// however, we need to add a message after generating the job files +export const handler = async ({ name, force, ...rest }) => { + recordTelemetryAttributes({ + command: 'generate job', + force, + rollback: rest.rollback, + }) + + validateName(name) + + let queueName = 'default' + + // Attempt to read the first queue in the user's job config file + try { + const jobsManagerFile = getPaths().api.distJobsConfig + const jobManager = await import(pathToFileURL(jobsManagerFile).href) + queueName = jobManager.jobs?.queues[0] ??
'default' + } catch (_e) { + // We don't care if this fails because we'll fall back to 'default' + } + + const tasks = new Listr( + [ + { + title: 'Generating job files...', + task: async () => { + return writeFilesTask(await files({ name, queueName, ...rest }), { + overwriteExisting: force, + }) + }, + }, + { + title: 'Cleaning up...', + task: () => { + execa.commandSync('yarn', [ + 'eslint', + '--fix', + '--config', + `${getPaths().base}/node_modules/@redwoodjs/eslint-config/shared.js`, + `${getPaths().api.jobsConfig}`, + ]) + }, + }, + ], + { rendererOptions: { collapseSubtasks: false }, exitOnError: true }, + ) + + try { + if (rest.rollback && !force) { + prepareForRollback(tasks) + } + await tasks.run() + } catch (e) { + errorTelemetry(process.argv, e.message) + console.error(c.error(e.message)) + process.exit(e?.exitCode || 1) + } +} diff --git a/packages/cli/src/commands/generate/job/templates/job.ts.template b/packages/cli/src/commands/generate/job/templates/job.ts.template new file mode 100644 index 000000000000..8c049fb3fdab --- /dev/null +++ b/packages/cli/src/commands/generate/job/templates/job.ts.template @@ -0,0 +1,8 @@ +import { jobs } from 'src/lib/jobs' + +export const ${name}Job = jobs.createJob({ + queue: '${queueName}', + perform: async () => { + jobs.logger.info('${name}Job is performing...') + } +}) diff --git a/packages/cli/src/commands/generate/job/templates/scenarios.ts.template b/packages/cli/src/commands/generate/job/templates/scenarios.ts.template new file mode 100644 index 000000000000..d24ff747f6bb --- /dev/null +++ b/packages/cli/src/commands/generate/job/templates/scenarios.ts.template @@ -0,0 +1,8 @@ +import type { ScenarioData } from '@redwoodjs/testing/api' + +export const standard = defineScenario({ + // Define the "fixture" to write into your test database here + // See guide: https://redwoodjs.com/docs/testing#scenarios +}) + +export type StandardScenario = ScenarioData diff --git a/packages/cli/src/commands/generate/job/templates/test.ts.template b/packages/cli/src/commands/generate/job/templates/test.ts.template new file mode 100644 index 000000000000..9ff7a20db4df --- /dev/null +++ b/packages/cli/src/commands/generate/job/templates/test.ts.template @@ -0,0 +1,7 @@ +import { ${name}Job } from './${name}Job' + +describe('${name}Job', () => { + it('should not throw any errors', async () => { + await expect(${name}Job.perform()).resolves.not.toThrow() + }) +}) diff --git a/packages/cli/src/commands/jobs.js b/packages/cli/src/commands/jobs.js new file mode 100644 index 000000000000..b7e15face14f --- /dev/null +++ b/packages/cli/src/commands/jobs.js @@ -0,0 +1,22 @@ +export const command = 'jobs' +export const description = + 'Starts the RedwoodJob runner to process background jobs' + +export const builder = (yargs) => { + // Disable yargs parsing of commands and options because it's forwarded + // to rw-jobs + yargs + .strictOptions(false) + .strictCommands(false) + .strict(false) + .parserConfiguration({ + 'camel-case-expansion': false, + }) + .help(false) + .version(false) +} + +export const handler = async (options) => { + const { handler } = await import('./jobsHandler.js') + return handler(options) +} diff --git a/packages/cli/src/commands/jobsHandler.js b/packages/cli/src/commands/jobsHandler.js new file mode 100644 index 000000000000..1d92f6de1e7a --- /dev/null +++ b/packages/cli/src/commands/jobsHandler.js @@ -0,0 +1,36 @@ +import execa from 'execa' + +import { getPaths } from '../lib/index' + +export const handler = async ({ + _, + $0: _rw, + 
commands: _commands, + ...options +}) => { + const args = [_.pop()] + + for (const [name, value] of Object.entries(options)) { + // Allow both long and short form options, e.g. --name and -n + args.push(name.length > 1 ? `--${name}` : `-${name}`) + args.push(value) + } + + let command = `yarn rw-jobs ${args.join(' ')}` + const originalLogLevel = process.env.LOG_LEVEL + process.env.LOG_LEVEL = originalLogLevel || 'warn' + + // make logs look nice in development (assume any env that's not prod is dev) + // that includes showing more verbose logs unless the user sets otherwise + if (process.env.NODE_ENV !== 'production') { + command += ' | yarn rw-log-formatter' + process.env.LOG_LEVEL = originalLogLevel || 'debug' + } + + execa.commandSync(command, { + shell: true, + cwd: getPaths().base, + stdio: 'inherit', + cleanup: true, + }) +} diff --git a/packages/cli/src/commands/setup/jobs/jobs.js b/packages/cli/src/commands/setup/jobs/jobs.js new file mode 100644 index 000000000000..c1384861a71e --- /dev/null +++ b/packages/cli/src/commands/setup/jobs/jobs.js @@ -0,0 +1,32 @@ +import terminalLink from 'terminal-link' + +import { recordTelemetryAttributes } from '@redwoodjs/cli-helpers' + +export const command = 'jobs' +export const description = + 'Sets up the config file and parent directory for background jobs' + +export const builder = (yargs) => { + yargs + .option('force', { + alias: 'f', + default: false, + description: 'Overwrite existing files', + type: 'boolean', + }) + .epilogue( + `Also see the ${terminalLink( + 'Redwood CLI Reference', + 'https://redwoodjs.com/docs/cli-commands#setup-jobs', + )}`, + ) +} + +export const handler = async (options) => { + recordTelemetryAttributes({ + command: 'setup jobs', + force: options.force, + }) + const { handler } = await import('./jobsHandler.js') + return handler(options) +} diff --git a/packages/cli/src/commands/setup/jobs/jobsHandler.js b/packages/cli/src/commands/setup/jobs/jobsHandler.js new file mode 100644 index 000000000000..930d9a36ff77 --- /dev/null +++ b/packages/cli/src/commands/setup/jobs/jobsHandler.js @@ -0,0 +1,140 @@ +import * as fs from 'node:fs' +import * as path from 'node:path' + +import { getDMMF } from '@prisma/internals' +import { Listr } from 'listr2' + +import { addApiPackages } from '@redwoodjs/cli-helpers' + +import { getPaths, transformTSToJS, writeFile } from '../../../lib' +import c from '../../../lib/colors' +import { isTypeScriptProject } from '../../../lib/project' + +const MODEL_SCHEMA = ` +model BackgroundJob { + id Int @id @default(autoincrement()) + attempts Int @default(0) + handler String + queue String + priority Int + runAt DateTime? + lockedAt DateTime? + lockedBy String? + lastError String? + failedAt DateTime? + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +} +` + +const getModelNames = async () => { + const schema = await getDMMF({ datamodelPath: getPaths().api.dbSchema }) + + return schema.datamodel.models.map((model) => model.name) +} + +// TODO(jgmw): This won't handle prisma with schema folder preview feature +const addDatabaseModel = () => { + const schema = fs.readFileSync(getPaths().api.dbSchema, 'utf-8') + + const schemaWithBackgroundJob = schema + MODEL_SCHEMA + + fs.writeFileSync(getPaths().api.dbSchema, schemaWithBackgroundJob) +} + +const tasks = async ({ force }) => { + const modelExists = (await getModelNames()).includes('BackgroundJob') + + const redwoodVersion = + require(path.join(getPaths().base, 'package.json')).devDependencies[ + '@redwoodjs/core' + ] ??
'latest' + const jobsPackage = `@redwoodjs/jobs@${redwoodVersion}` + + return new Listr( + [ + { + title: 'Creating job database model...', + task: () => { + addDatabaseModel() + }, + skip: () => { + if (modelExists) { + return 'BackgroundJob model exists, skipping' + } + }, + }, + { + title: 'Creating config file in api/src/lib...', + task: async () => { + const isTs = isTypeScriptProject() + const outputExtension = isTs ? 'ts' : 'js' + const outputPath = path.join( + getPaths().api.lib, + `jobs.${outputExtension}`, + ) + let template = fs + .readFileSync( + path.resolve(__dirname, 'templates', 'jobs.ts.template'), + ) + .toString() + + if (!isTs) { + template = await transformTSToJS(outputPath, template) + } + + writeFile(outputPath, template, { + overwriteExisting: force, + }) + }, + }, + { + title: 'Creating jobs dir at api/src/jobs...', + task: () => { + try { + fs.mkdirSync(getPaths().api.jobs) + } catch (e) { + // ignore directory already existing + if (!e.message.match('file already exists')) { + throw new Error(e) + } + } + writeFile(path.join(getPaths().api.jobs, '.keep'), '', { + overwriteExisting: force, + }) + }, + }, + addApiPackages([jobsPackage]), + { + title: 'One more thing...', + task: (_ctx, task) => { + task.title = `One more thing... + + ${c.success('\nBackground jobs configured!\n')} + + ${!modelExists ? 'Migrate your database to finish setting up jobs:\n' : ''} + ${!modelExists ? c.warning('\n\u00A0\u00A0yarn rw prisma migrate dev\n') : ''} + + Generate jobs with: ${c.warning('yarn rw g job <name>')} + Execute jobs with: ${c.warning('yarn rw jobs work\n')} + + Check out the docs for more info: + ${c.link('https://docs.redwoodjs.com/docs/background-jobs')} + + ` + }, + }, + ], + { rendererOptions: { collapseSubtasks: false }, exitOnError: true }, + ) +} + +export const handler = async ({ force }) => { + const t = await tasks({ force }) + + try { + await t.run() + } catch (e) { + console.log(c.error(e.message)) + } +} diff --git a/packages/cli/src/commands/setup/jobs/templates/jobs.ts.template b/packages/cli/src/commands/setup/jobs/templates/jobs.ts.template new file mode 100644 index 000000000000..c79669655368 --- /dev/null +++ b/packages/cli/src/commands/setup/jobs/templates/jobs.ts.template @@ -0,0 +1,32 @@ +// Setup for background jobs.
Jobs themselves live in api/src/jobs +// Execute jobs in dev with `yarn rw jobs work` +// See https://docs.redwoodjs.com/docs/background-jobs + +import { PrismaAdapter, JobManager } from '@redwoodjs/jobs' + +import { db } from 'src/lib/db' +import { logger } from 'src/lib/logger' + +export const jobs = new JobManager({ + adapters: { + prisma: new PrismaAdapter({ db, logger }), + }, + queues: ['default'] as const, + logger, + workers: [ + { + adapter: 'prisma', + logger, + queue: '*', // watch all queues + count: 1, + maxAttempts: 24, + maxRuntime: 14_400, + deleteFailedJobs: false, + sleepDelay: 5, + }, + ], +}) + +export const later = jobs.createScheduler({ + adapter: 'prisma', +}) diff --git a/packages/cli/src/index.js b/packages/cli/src/index.js index c57e899f4c3d..cd2dc2502650 100644 --- a/packages/cli/src/index.js +++ b/packages/cli/src/index.js @@ -20,6 +20,7 @@ import * as execCommand from './commands/exec' import * as experimentalCommand from './commands/experimental' import * as generateCommand from './commands/generate' import * as infoCommand from './commands/info' +import * as jobsCommand from './commands/jobs' import * as lintCommand from './commands/lint' import * as prerenderCommand from './commands/prerender' import * as prismaCommand from './commands/prisma' @@ -210,6 +211,7 @@ async function runYargs() { .command(experimentalCommand) .command(generateCommand) .command(infoCommand) + .command(jobsCommand) .command(lintCommand) .command(prerenderCommand) .command(prismaCommand) diff --git a/packages/cli/src/lib/colors.js b/packages/cli/src/lib/colors.js index 0bbed5ee01bb..1ff32be4224f 100644 --- a/packages/cli/src/lib/colors.js +++ b/packages/cli/src/lib/colors.js @@ -22,5 +22,5 @@ export default { tip: chalk.green, important: chalk.magenta, caution: chalk.red, - link: chalk.hex('#e8e8e8'), + link: chalk.underline, } diff --git a/packages/cli/src/lib/test.js b/packages/cli/src/lib/test.js index ae6bbc792428..2062817682ab 100644 --- a/packages/cli/src/lib/test.js +++ b/packages/cli/src/lib/test.js @@ -42,6 +42,7 @@ vi.mock('@redwoodjs/project-config', async (importOriginal) => { ), // this folder generators: path.join(BASE_PATH, './api/generators'), src: path.join(BASE_PATH, './api/src'), + jobs: path.join(BASE_PATH, './api/src/jobs'), services: path.join(BASE_PATH, './api/src/services'), directives: path.join(BASE_PATH, './api/src/directives'), graphql: path.join(BASE_PATH, './api/src/graphql'), diff --git a/packages/cli/src/telemetry/index.js b/packages/cli/src/telemetry/index.js index b8160d21e0a5..4112deec144f 100644 --- a/packages/cli/src/telemetry/index.js +++ b/packages/cli/src/telemetry/index.js @@ -78,7 +78,6 @@ export async function startTelemetry() { for (const signal of ['SIGTERM', 'SIGINT', 'SIGHUP']) { process.on(signal, () => { if (process.listenerCount(signal) === 1) { - console.log(`Received ${signal} signal, exiting...`) process.exit() } }) diff --git a/packages/cli/tsconfig.json b/packages/cli/tsconfig.json index e473397e5900..3adc99b0895b 100644 --- a/packages/cli/tsconfig.json +++ b/packages/cli/tsconfig.json @@ -8,7 +8,7 @@ "include": ["src", "./testUtils.d.ts"], "exclude": ["**/__testfixtures__"], "references": [ - { "path": "../api-server" }, + { "path": "../api-server/tsconfig.build.json" }, { "path": "../cli-helpers" }, { "path": "../internal" }, { "path": "../prerender" }, diff --git a/packages/context/build.mts b/packages/context/build.mts index 1b29b6313cd0..1bc9652ae67f 100644 --- a/packages/context/build.mts +++ b/packages/context/build.mts @@ 
-1,28 +1,14 @@ -import { build, defaultBuildOptions } from '@redwoodjs/framework-tools' +import { buildCjs, buildEsm } from '@redwoodjs/framework-tools' import { generateTypesCjs, generateTypesEsm, insertCommonJsPackageJson, } from '@redwoodjs/framework-tools/generateTypes' -// ESM build and type generation -await build({ - buildOptions: { - ...defaultBuildOptions, - format: 'esm', - }, -}) +await buildEsm() await generateTypesEsm() -// CJS build, type generation, and package.json insert -await build({ - buildOptions: { - ...defaultBuildOptions, - outdir: 'dist/cjs', - }, -}) +await buildCjs() await generateTypesCjs() -await insertCommonJsPackageJson({ - buildFileUrl: import.meta.url, - cjsDir: 'dist/cjs', -}) + +await insertCommonJsPackageJson({ buildFileUrl: import.meta.url }) diff --git a/packages/context/package.json b/packages/context/package.json index 065caac15192..2a6fb90e6e0f 100644 --- a/packages/context/package.json +++ b/packages/context/package.json @@ -39,7 +39,7 @@ "scripts": { "build": "tsx ./build.mts", "build:pack": "yarn pack -o redwoodjs-context.tgz", - "build:types": "tsc --build --verbose ./tsconfig.json", + "build:types": "tsc --build --verbose ./tsconfig.build.json", "build:types-cjs": "tsc --build --verbose ./tsconfig.cjs.json", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "check:attw": "yarn attw -P", diff --git a/packages/context/tsconfig.build.json b/packages/context/tsconfig.build.json new file mode 100644 index 000000000000..52f021978079 --- /dev/null +++ b/packages/context/tsconfig.build.json @@ -0,0 +1,11 @@ +{ + "extends": "../../tsconfig.compilerOption.json", + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "module": "Node16", + "moduleResolution": "Node16", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"] +} diff --git a/packages/context/tsconfig.cjs.json b/packages/context/tsconfig.cjs.json index eaa211040f2f..a660cecf11ff 100644 --- a/packages/context/tsconfig.cjs.json +++ b/packages/context/tsconfig.cjs.json @@ -1,5 +1,5 @@ { - "extends": "./tsconfig.json", + "extends": "./tsconfig.build.json", "compilerOptions": { "outDir": "dist/cjs", "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo" diff --git a/packages/context/tsconfig.json b/packages/context/tsconfig.json index db264235028b..ad7684d49aba 100644 --- a/packages/context/tsconfig.json +++ b/packages/context/tsconfig.json @@ -1,11 +1,11 @@ { "extends": "../../tsconfig.compilerOption.json", "compilerOptions": { - "rootDir": "src", "outDir": "dist", "module": "Node16", - "moduleResolution": "Node16", - "tsBuildInfoFile": "./tsconfig.tsbuildinfo" + "moduleResolution": "Node16" }, - "include": ["src"] + "include": ["."], + "exclude": ["dist", "node_modules", "**/__mocks__", "**/__fixtures__"], + "references": [{ "path": "../framework-tools" }] } diff --git a/packages/forms/.babelrc.js b/packages/forms/.babelrc.js deleted file mode 100644 index 3b2c815712d9..000000000000 --- a/packages/forms/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../babel.config.js' } diff --git a/packages/forms/build.mts b/packages/forms/build.mts new file mode 100644 index 000000000000..16175a6725c0 --- /dev/null +++ b/packages/forms/build.mts @@ -0,0 +1,3 @@ +import { build } from '@redwoodjs/framework-tools' + +await build() diff --git a/packages/forms/package.json b/packages/forms/package.json index 1628bca05d4d..3c27f820a6c6 100644 --- a/packages/forms/package.json +++ b/packages/forms/package.json @@ 
-7,31 +7,38 @@ "directory": "packages/forms" }, "license": "MIT", - "main": "dist/index.js", - "types": "dist/index.d.ts", + "type": "commonjs", + "exports": { + ".": { + "default": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + } + }, + "main": "./dist/index.js", + "types": "./dist/index.d.ts", "files": [ "dist" ], "scripts": { - "build": "yarn build:js && yarn build:types", - "build:js": "babel src -d dist --extensions \".js,.jsx,.ts,.tsx\"", + "build": "tsx ./build.mts && yarn build:types", "build:pack": "yarn pack -o redwoodjs-forms.tgz", - "build:types": "tsc --build --verbose", + "build:types": "tsc --build --verbose ./tsconfig.build.json", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", "prepublishOnly": "NODE_ENV=production yarn build", "test": "vitest run", "test:watch": "vitest watch" }, "dependencies": { - "@babel/runtime-corejs3": "7.25.0", - "core-js": "3.38.0", "graphql": "16.9.0", "pascalcase": "1.0.0", "react-hook-form": "7.52.2" }, "devDependencies": { - "@babel/cli": "7.24.8", - "@babel/core": "^7.22.20", + "@redwoodjs/framework-tools": "workspace:*", "@testing-library/dom": "9.3.4", "@testing-library/jest-dom": "6.4.8", "@testing-library/react": "14.3.1", @@ -39,9 +46,12 @@ "@types/pascalcase": "1.0.3", "@types/react": "^18.2.55", "@types/react-dom": "^18.2.19", + "concurrently": "8.2.2", "nodemon": "3.1.4", + "publint": "0.2.10", "react": "19.0.0-rc-8269d55d-20240802", "react-dom": "19.0.0-rc-8269d55d-20240802", + "tsx": "4.17.0", "typescript": "5.5.4", "vitest": "2.0.5" }, diff --git a/packages/forms/tsconfig.build.json b/packages/forms/tsconfig.build.json new file mode 100644 index 000000000000..52f021978079 --- /dev/null +++ b/packages/forms/tsconfig.build.json @@ -0,0 +1,11 @@ +{ + "extends": "../../tsconfig.compilerOption.json", + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "module": "Node16", + "moduleResolution": "Node16", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"] +} diff --git a/packages/forms/tsconfig.json b/packages/forms/tsconfig.json index 7e5ce7eb48ed..4d825e8f17b6 100644 --- a/packages/forms/tsconfig.json +++ b/packages/forms/tsconfig.json @@ -1,8 +1,9 @@ { "extends": "../../tsconfig.compilerOption.json", "compilerOptions": { - "rootDir": "src", - "outDir": "dist" + "module": "Node16", + "moduleResolution": "Node16" }, - "include": ["src"] + "include": ["."], + "exclude": ["dist", "node_modules", "**/__mocks__", "**/__tests__/fixtures"] } diff --git a/packages/framework-tools/src/buildDefaults.ts b/packages/framework-tools/src/buildDefaults.ts index a908448e8307..2954f4fcfffc 100644 --- a/packages/framework-tools/src/buildDefaults.ts +++ b/packages/framework-tools/src/buildDefaults.ts @@ -85,6 +85,48 @@ export async function build({ } } +export function buildCjs() { + return build({ + buildOptions: { + ...defaultBuildOptions, + tsconfig: 'tsconfig.cjs.json', + outdir: 'dist/cjs', + }, + }) +} + +export function buildEsm() { + return build({ + buildOptions: { + ...defaultBuildOptions, + tsconfig: 'tsconfig.build.json', + format: 'esm', + }, + }) +} + +export function buildExternalCjs() { + return build({ + buildOptions: { + ...defaultBuildOptions, + tsconfig: 'tsconfig.cjs.json', + outdir: 'dist/cjs', + packages: 'external', + }, + }) +} + +export function buildExternalEsm() { + return build({ + buildOptions: { + 
...defaultBuildOptions, + tsconfig: 'tsconfig.build.json', + format: 'esm', + packages: 'external', + }, + }) +} + interface CopyAssetsOptions { buildFileUrl: string patterns: string[] } diff --git a/packages/framework-tools/src/generateTypes.ts b/packages/framework-tools/src/generateTypes.ts index f2029ab75262..6a6b0922892b 100644 --- a/packages/framework-tools/src/generateTypes.ts +++ b/packages/framework-tools/src/generateTypes.ts @@ -6,11 +6,15 @@ import type { PackageJson } from 'type-fest' import { $ } from 'zx' /** - * This function will run `yarn build:types-cjs` to generate the CJS type definitions. + * This function will run `yarn build:types-cjs` to generate the CJS type + * definitions. * - * It will also temporarily change the package.json file to have "type": "commonjs". This - * is the most reliable way to generate CJS type definitions. It will revert the package.json - * file back to its original state after the types have been generated - even if an error occurs. + * It will also temporarily change the package.json file to have + * `"type": "commonjs"`. This is the most reliable way to generate CJS type + * definitions[1]. It will revert the package.json file back to its original + * state after the types have been generated - even if an error occurs. + + + * [1]: https://github.com/arethetypeswrong/arethetypeswrong.github.io/issues/21#issuecomment-1494618930 */ export async function generateTypesCjs() { await $`cp package.json package.json.bak` @@ -33,7 +37,8 @@ export async function generateTypesCjs() { } /** - * This function will run `yarn build:types` to generate the ESM type definitions. + * This function will run `yarn build:types` to generate the ESM type + * definitions. */ export async function generateTypesEsm() { try { @@ -46,16 +51,16 @@ export async function generateTypesEsm() { } /** - * This function will insert a package.json file with "type": "commonjs" in the CJS build directory. - * This is necessary for the CJS build to be recognized as CommonJS modules when the root package.json - * file has "type": "module". + * This function will insert a package.json file with "type": "commonjs" in the + * CJS build directory. This is necessary for the CJS build to be recognized as + * CommonJS modules when the root package.json file has `"type": "module"`. */ export async function insertCommonJsPackageJson({ buildFileUrl, - cjsDir, + cjsDir = 'dist/cjs', }: { buildFileUrl: string - cjsDir: string + cjsDir?: string }) { const packageDir = path.dirname(fileURLToPath(buildFileUrl)) const packageJsonPath = path.join(packageDir, cjsDir, 'package.json') diff --git a/packages/graphql-server/tsconfig.json b/packages/graphql-server/tsconfig.json index 7b52929279dd..128bbc0f8f59 100644 --- a/packages/graphql-server/tsconfig.json +++ b/packages/graphql-server/tsconfig.json @@ -7,5 +7,8 @@ "moduleResolution": "Node16" }, "include": ["ambient.d.ts", "src/**/*"], - "references": [{ "path": "../api" }, { "path": "../context" }] + "references": [ + { "path": "../api" }, + { "path": "../context/tsconfig.build.json" } + ] } diff --git a/packages/jobs/README.md b/packages/jobs/README.md new file mode 100644 index 000000000000..f00fc1b5598a --- /dev/null +++ b/packages/jobs/README.md @@ -0,0 +1,3 @@ +# RedwoodJob + +Provides background job scheduling and processing for Redwood.
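(A rough usage sketch, not part of this diff: with the api/src/lib/jobs setup file shown in the CLI template above, application code schedules work through the `later` scheduler that file exports. The service below and its input shape are hypothetical; `SampleJob` is the job the generator produces.)

```ts
// api/src/services/users/users.ts (hypothetical service)
import { SampleJob } from 'src/jobs/SampleJob/SampleJob'
import { later } from 'src/lib/jobs'

export const createUser = async ({ input }: { input: { email: string } }) => {
  // ...create the user record first, then queue the job; a worker started
  // with `yarn rw jobs work` will pick it up from the database queue
  await later(SampleJob, [])
  // scheduling options (e.g. a delay) are passed as a third argument,
  // per the background-jobs docs added in this PR:
  // await later(SampleJob, [], { wait: 300 })
}
```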
diff --git a/packages/jobs/build.mts b/packages/jobs/build.mts
new file mode 100644
index 000000000000..d107312c1aff
--- /dev/null
+++ b/packages/jobs/build.mts
@@ -0,0 +1,32 @@
+import {
+  build,
+  buildEsm,
+  defaultBuildOptions,
+  defaultIgnorePatterns,
+} from '@redwoodjs/framework-tools'
+import {
+  generateTypesCjs,
+  generateTypesEsm,
+  insertCommonJsPackageJson,
+} from '@redwoodjs/framework-tools/generateTypes'
+
+// ESM build and type generation
+await buildEsm()
+await generateTypesEsm()
+
+// CJS build, type generation, and package.json insert
+await build({
+  buildOptions: {
+    ...defaultBuildOptions,
+    outdir: 'dist/cjs',
+    tsconfig: 'tsconfig.cjs.json',
+  },
+  entryPointOptions: {
+    // We don't need a CJS copy of the bins
+    ignore: [...defaultIgnorePatterns, './src/bins'],
+  },
+})
+await generateTypesCjs()
+await insertCommonJsPackageJson({
+  buildFileUrl: import.meta.url,
+})
diff --git a/packages/jobs/package.json b/packages/jobs/package.json
new file mode 100644
index 000000000000..104ed4ad688a
--- /dev/null
+++ b/packages/jobs/package.json
@@ -0,0 +1,58 @@
+{
+  "name": "@redwoodjs/jobs",
+  "version": "7.0.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/redwoodjs/redwood.git",
+    "directory": "packages/jobs"
+  },
+  "license": "MIT",
+  "type": "module",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/index.d.ts",
+        "default": "./dist/index.js"
+      },
+      "default": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    }
+  },
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "bin": {
+    "rw-jobs": "./dist/bins/rw-jobs.js",
+    "rw-jobs-worker": "./dist/bins/rw-jobs-worker.js"
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "build": "tsx ./build.mts",
+    "build:pack": "yarn pack -o redwoodjs-jobs.tgz",
+    "build:types": "tsc --build --verbose ./tsconfig.build.json",
+    "build:types-cjs": "tsc --build --verbose ./tsconfig.cjs.json",
+    "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"",
+    "check:attw": "yarn rw-fwtools-attw",
+    "check:package": "concurrently npm:check:attw yarn:publint",
+    "prepublishOnly": "NODE_ENV=production yarn build",
+    "test": "vitest run",
+    "test:watch": "vitest"
+  },
+  "dependencies": {
+    "@redwoodjs/cli-helpers": "workspace:*",
+    "@redwoodjs/project-config": "workspace:*"
+  },
+  "devDependencies": {
+    "@prisma/client": "5.18.0",
+    "@redwoodjs/framework-tools": "workspace:*",
+    "concurrently": "8.2.2",
+    "publint": "0.2.10",
+    "tsx": "4.17.0",
+    "typescript": "5.5.4",
+    "vitest": "2.0.5"
+  }
+}
diff --git a/packages/jobs/src/adapters/BaseAdapter/BaseAdapter.ts b/packages/jobs/src/adapters/BaseAdapter/BaseAdapter.ts
new file mode 100644
index 000000000000..90e907d922f9
--- /dev/null
+++ b/packages/jobs/src/adapters/BaseAdapter/BaseAdapter.ts
@@ -0,0 +1,90 @@
+import { DEFAULT_LOGGER } from '../../consts.js'
+import type { BaseJob, BasicLogger, PossibleBaseJob } from '../../types.js'
+
+// Arguments sent to an adapter to schedule a job
+export interface SchedulePayload {
+  name: string
+  path: string
+  args: unknown[]
+  runAt: Date
+  queue: string
+  priority: number
+}
+
+export interface FindArgs {
+  processName: string
+  maxRuntime: number
+  queues: string[]
+}
+
+export interface BaseAdapterOptions {
+  logger?: BasicLogger
+}
+
+export interface SuccessOptions<TJob extends BaseJob = BaseJob> {
+  job: TJob
+  deleteJob?: boolean
+}
+
+export interface ErrorOptions<TJob extends BaseJob = BaseJob> {
+  job: TJob
+  error: Error
+}
+
+export interface FailureOptions<TJob extends BaseJob = BaseJob> {
+  job: TJob
+  deleteJob?: boolean
+}
+
+/**
+ * Base class for all job adapters. Provides a common interface for scheduling
+ * jobs. At a minimum, you must implement the `schedule` method in your adapter.
+ *
+ * Any object passed to the constructor is saved in `this.options` and should
+ * be used to configure your custom adapter. If `options.logger` is included
+ * you can access it via `this.logger`.
+ */
+export abstract class BaseAdapter<
+  TOptions extends BaseAdapterOptions = BaseAdapterOptions,
+  TScheduleReturn = void | Promise<void>,
+> {
+  options: TOptions
+  logger: NonNullable<TOptions['logger']>
+
+  constructor(options: TOptions) {
+    this.options = options
+    this.logger = options?.logger ?? DEFAULT_LOGGER
+  }
+
+  // It's up to the subclass to decide what to return for these functions.
+  // The job engine itself doesn't care about the return value, but the user may
+  // want to do something with the result depending on the adapter type, so make
+  // it `any` to allow for the subclass to return whatever it wants.
+
+  abstract schedule(payload: SchedulePayload): TScheduleReturn
+
+  /**
+   * Find a single job that's eligible to run with the given args
+   */
+  abstract find(args: FindArgs): PossibleBaseJob | Promise<PossibleBaseJob>
+
+  /**
+   * Called when a job has successfully completed
+   */
+  abstract success(options: SuccessOptions): void | Promise<void>
+
+  /**
+   * Called when an attempt to run a job produced an error
+   */
+  abstract error(options: ErrorOptions): void | Promise<void>
+
+  /**
+   * Called when a job has errored more than maxAttempts and will not be retried
+   */
+  abstract failure(options: FailureOptions): void | Promise<void>
+
+  /**
+   * Clear all jobs from storage
+   */
+  abstract clear(): void | Promise<void>
+}
diff --git a/packages/jobs/src/adapters/BaseAdapter/__tests__/BaseAdapter.test.ts b/packages/jobs/src/adapters/BaseAdapter/__tests__/BaseAdapter.test.ts
new file mode 100644
index 000000000000..ef122900f3d7
--- /dev/null
+++ b/packages/jobs/src/adapters/BaseAdapter/__tests__/BaseAdapter.test.ts
@@ -0,0 +1,34 @@
+import { describe, expect, it } from 'vitest'
+
+import { mockLogger } from '../../../core/__tests__/mocks.js'
+import { BaseAdapter } from '../BaseAdapter.js'
+import type { BaseAdapterOptions } from '../BaseAdapter.js'
+
+interface TestAdapterOptions extends BaseAdapterOptions {
+  foo: string
+}
+
+class TestAdapter extends BaseAdapter<TestAdapterOptions> {
+  schedule() {}
+  find() {
+    return undefined
+  }
+  success() {}
+  error() {}
+  failure() {}
+  clear() {}
+}
+
+describe('constructor', () => {
+  it('saves options', () => {
+    const adapter = new TestAdapter({ foo: 'bar' })
+
+    expect(adapter.options.foo).toEqual('bar')
+  })
+
+  it('creates a separate instance var for any logger', () => {
+    const adapter = new TestAdapter({ foo: 'bar', logger: mockLogger })
+
+    expect(adapter.logger).toEqual(mockLogger)
+  })
+})
diff --git a/packages/jobs/src/adapters/PrismaAdapter/PrismaAdapter.ts b/packages/jobs/src/adapters/PrismaAdapter/PrismaAdapter.ts
new file mode 100644
index 000000000000..05e036013173
--- /dev/null
+++ b/packages/jobs/src/adapters/PrismaAdapter/PrismaAdapter.ts
@@ -0,0 +1,278 @@
+import type { PrismaClient } from '@prisma/client'
+import { camelCase } from 'change-case'
+
+import { DEFAULT_MAX_RUNTIME, DEFAULT_MODEL_NAME } from '../../consts.js'
+import type { BaseJob } from '../../types.js'
+import type {
+  BaseAdapterOptions,
+  SchedulePayload,
+  FindArgs,
+  SuccessOptions,
+  ErrorOptions,
+  FailureOptions,
+} from '../BaseAdapter/BaseAdapter.js'
+import { BaseAdapter } from '../BaseAdapter/BaseAdapter.js'
+
+import { ModelNameError } from './errors.js'
+
+export interface PrismaJob extends BaseJob {
+  id: number
+  handler: string
+  runAt: Date
+  lockedAt: Date
+  lockedBy: string
+  lastError: string | null
+  failedAt: Date | null
+  createdAt: Date
+  updatedAt: Date
+}
+
+export interface PrismaAdapterOptions extends BaseAdapterOptions {
+  /**
+   * An instance of PrismaClient which will be used to talk to the database
+   */
+  db: PrismaClient
+
+  /**
+   * The name of the model in the Prisma schema that represents the job table.
+   * @default 'BackgroundJob'
+   */
+  model?: string
+}
+
+interface FailureData {
+  lockedAt: null
+  lockedBy: null
+  lastError: string
+  failedAt?: Date
+  runAt: Date | null
+}
+
+/**
+ * Implements a job adapter using Prisma ORM.
+ *
+ * Assumes a table exists with the following schema (the table name can be customized):
+ * ```prisma
+ * model BackgroundJob {
+ *   id        Int       \@id \@default(autoincrement())
+ *   attempts  Int       \@default(0)
+ *   handler   String
+ *   queue     String
+ *   priority  Int
+ *   runAt     DateTime
+ *   lockedAt  DateTime?
+ *   lockedBy  String?
+ *   lastError String?
+ *   failedAt  DateTime?
+ *   createdAt DateTime  \@default(now())
+ *   updatedAt DateTime  \@updatedAt
+ * }
+ * ```
+ */
+export class PrismaAdapter extends BaseAdapter<PrismaAdapterOptions> {
+  db: PrismaClient
+  model: string
+  accessor: PrismaClient[keyof PrismaClient]
+  provider: string
+
+  constructor(options: PrismaAdapterOptions) {
+    super(options)
+
+    this.db = options.db
+
+    // name of the model as defined in schema.prisma
+    this.model = options.model || DEFAULT_MODEL_NAME
+
+    // the function to call on `db` to make queries: `db.backgroundJob`
+    this.accessor = this.db[camelCase(this.model)]
+
+    // the database provider type: 'sqlite' | 'postgresql' | 'mysql'
+    // not used currently, but may be useful in the future for optimizations
+    this.provider = options.db._activeProvider
+
+    // validate that everything we need is available
+    if (!this.accessor) {
+      throw new ModelNameError(this.model)
+    }
+  }
+
+  /**
+   * Finds the next job to run, locking it so that no other process can pick
+   * it up. The act of locking a job is dependent on the DB server, so we'll
+   * run some raw SQL to do it in each case—Prisma doesn't provide enough
+   * flexibility in their generated code to do this in a DB-agnostic way.
+   *
+   * TODO: there may be more optimized versions of the locking queries in
+   * Postgres and MySQL
+   */
+  override async find({
+    processName,
+    maxRuntime,
+    queues,
+  }: FindArgs): Promise<PrismaJob | undefined> {
+    const maxRuntimeExpire = new Date(
+      new Date().getTime() - (maxRuntime || DEFAULT_MAX_RUNTIME) * 1000,
+    )
+
+    // This query is gnarly but not so bad once you know what it's doing. For a
+    // job to match it must:
+    // - have a runAt in the past
+    // - be either not locked, locked more than `maxRuntime` ago,
+    //   or already locked by this exact process and never cleaned up
+    // - not have a failedAt, meaning we will stop retrying
+    // Translates to:
+    // `((runAt <= ? AND (lockedAt IS NULL OR lockedAt < ?)) OR lockedBy = ?) AND failedAt IS NULL`
+    const where = {
+      AND: [
+        {
+          OR: [
+            {
+              AND: [
+                { runAt: { lte: new Date() } },
+                {
+                  OR: [
+                    { lockedAt: null },
+                    {
+                      lockedAt: {
+                        lt: maxRuntimeExpire,
+                      },
+                    },
+                  ],
+                },
+              ],
+            },
+            { lockedBy: processName },
+          ],
+        },
+        { failedAt: null },
+      ],
+    }
+
+    // If queues is ['*'] then skip, otherwise add a WHERE...IN for the array of
+    // queue names
+    const whereWithQueue = where
+    if (queues.length > 1 || queues[0] !== '*') {
+      Object.assign(whereWithQueue, {
+        AND: [...where.AND, { queue: { in: queues } }],
+      })
+    }
+
+    // Actually query the DB
+    const job = await this.accessor.findFirst({
+      select: { id: true, attempts: true },
+      where: whereWithQueue,
+      orderBy: [{ priority: 'asc' }, { runAt: 'asc' }],
+      take: 1,
+    })
+
+    if (job) {
+      // If one was found, try to lock it by updating the record with the
+      // same WHERE clause as above (if another process locked it in the
+      // meantime there will be no record left to update)
+      const whereWithQueueAndId = Object.assign(whereWithQueue, {
+        AND: [...whereWithQueue.AND, { id: job.id }],
+      })
+
+      // Update and increment the attempts count
+      const { count } = await this.accessor.updateMany({
+        where: whereWithQueueAndId,
+        data: {
+          lockedAt: new Date(),
+          lockedBy: processName,
+          attempts: job.attempts + 1,
+        },
+      })
+
+      // Assuming the update worked, return the full details of the job
+      if (count) {
+        const data = await this.accessor.findFirst({ where: { id: job.id } })
+        const { name, path, args } = JSON.parse(data.handler)
+        return { ...data, name, path, args }
+      }
+    }
+
+    // If we get here then there were either no jobs, or the one we found
+    // was locked by another worker
+    return undefined
+  }
+
+  // Prisma queries are lazily evaluated and only sent to the db when they are
+  // awaited, so do the await here to ensure they actually run (if the user
+  // doesn't await the Promise then the queries will never be executed!)
+  override async success({ job, deleteJob }: SuccessOptions) {
+    this.logger.debug(`[RedwoodJob] Job ${job.id} success`)
+
+    if (deleteJob) {
+      await this.accessor.delete({ where: { id: job.id } })
+    } else {
+      await this.accessor.update({
+        where: { id: job.id },
+        data: {
+          lockedAt: null,
+          lockedBy: null,
+          lastError: null,
+          runAt: null,
+        },
+      })
+    }
+  }
+
+  override async error({ job, error }: ErrorOptions) {
+    this.logger.debug(`[RedwoodJob] Job ${job.id} failure`)
+
+    const data: FailureData = {
+      lockedAt: null,
+      lockedBy: null,
+      lastError: `${error.message}\n\n${error.stack}`,
+      runAt: null,
+    }
+
+    data.runAt = new Date(
+      new Date().getTime() + this.backoffMilliseconds(job.attempts),
+    )
+
+    await this.accessor.update({
+      where: { id: job.id },
+      data,
+    })
+  }
+
+  // Job has had too many attempts and has now permanently failed.
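+  // (For scale: with the backoff of `1000 * attempts ** 4` milliseconds below
+  // and the default cap of 24 attempts, the final two tries end up roughly
+  // 3.8 days apart.)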
+ override async failure({ job, deleteJob }: FailureOptions) { + if (deleteJob) { + await this.accessor.delete({ where: { id: job.id } }) + } else { + await this.accessor.update({ + where: { id: job.id }, + data: { failedAt: new Date() }, + }) + } + } + + // Schedules a job by creating a new record in the background job table + override async schedule({ + name, + path, + args, + runAt, + queue, + priority, + }: SchedulePayload) { + await this.accessor.create({ + data: { + handler: JSON.stringify({ name, path, args }), + runAt, + queue, + priority, + }, + }) + } + + override async clear() { + await this.accessor.deleteMany() + } + + backoffMilliseconds(attempts: number) { + return 1000 * attempts ** 4 + } +} diff --git a/packages/jobs/src/adapters/PrismaAdapter/__tests__/PrismaAdapter.test.ts b/packages/jobs/src/adapters/PrismaAdapter/__tests__/PrismaAdapter.test.ts new file mode 100644 index 000000000000..0a623214fa9c --- /dev/null +++ b/packages/jobs/src/adapters/PrismaAdapter/__tests__/PrismaAdapter.test.ts @@ -0,0 +1,399 @@ +import type { PrismaClient } from '@prisma/client' +import { describe, expect, vi, it, beforeEach, afterEach } from 'vitest' + +import { DEFAULT_MODEL_NAME } from '../../../consts.js' +import { mockLogger } from '../../../core/__tests__/mocks.js' +import * as errors from '../errors.js' +import { PrismaAdapter } from '../PrismaAdapter.js' + +vi.useFakeTimers().setSystemTime(new Date('2024-01-01')) + +let mockDb: PrismaClient + +beforeEach(() => { + mockDb = { + _activeProvider: 'sqlite', + _runtimeDataModel: { + models: { + BackgroundJob: { + dbName: null, + }, + }, + }, + backgroundJob: { + create: vi.fn(), + delete: vi.fn(), + deleteMany: vi.fn(), + findFirst: vi.fn(), + update: vi.fn(), + updateMany: vi.fn(), + }, + } +}) + +afterEach(() => { + vi.resetAllMocks() +}) + +describe('constructor', () => { + it('defaults this.model name', () => { + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + + expect(adapter.model).toEqual(DEFAULT_MODEL_NAME) + }) + + it('can manually set this.model', () => { + mockDb._runtimeDataModel.models = { + Job: { + dbName: null, + }, + } + mockDb.job = {} + + const adapter = new PrismaAdapter({ + db: mockDb, + model: 'Job', + logger: mockLogger, + }) + + expect(adapter.model).toEqual('Job') + }) + + it('throws an error with a model name that does not exist', () => { + expect( + () => + new PrismaAdapter({ db: mockDb, model: 'FooBar', logger: mockLogger }), + ).toThrow(errors.ModelNameError) + }) + + it('sets this.accessor to the correct Prisma accessor', () => { + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + + expect(adapter.accessor).toEqual(mockDb.backgroundJob) + }) + + it('sets this.provider based on the active provider', () => { + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + + expect(adapter.provider).toEqual('sqlite') + }) +}) + +describe('schedule()', () => { + it('creates a job in the DB with required data', async () => { + const createSpy = vi + .spyOn(mockDb.backgroundJob, 'create') + .mockReturnValue({ id: 1 }) + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + await adapter.schedule({ + name: 'RedwoodJob', + path: 'RedwoodJob/RedwoodJob', + args: ['foo', 'bar'], + queue: 'default', + priority: 50, + runAt: new Date(), + }) + + expect(createSpy).toHaveBeenCalledWith({ + data: { + handler: JSON.stringify({ + name: 'RedwoodJob', + path: 'RedwoodJob/RedwoodJob', + args: ['foo', 'bar'], + }), + priority: 50, + queue: 'default', + 
runAt: new Date(), + }, + }) + }) +}) + +describe('find()', () => { + it('returns undefined if no job found', async () => { + vi.spyOn(mockDb.backgroundJob, 'findFirst').mockReturnValue(null) + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + const job = await adapter.find({ + processName: 'test', + maxRuntime: 1000, + queues: ['foobar'], + }) + + expect(job).toBeUndefined() + }) + + it('returns a job if found', async () => { + const mockJob = { + id: 1, + handler: JSON.stringify({ + name: 'TestJob', + path: 'TestJob/TestJob', + args: [], + }), + } + vi.spyOn(mockDb.backgroundJob, 'findFirst').mockReturnValue(mockJob) + vi.spyOn(mockDb.backgroundJob, 'updateMany').mockReturnValue({ count: 1 }) + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + const job = await adapter.find({ + processName: 'test', + maxRuntime: 1000, + queues: ['default'], + }) + + expect(job).toEqual({ + ...mockJob, + name: 'TestJob', + path: 'TestJob/TestJob', + args: [], + }) + }) + + it('increments the `attempts` count on the found job', async () => { + const mockJob = { + id: 1, + handler: JSON.stringify({ + name: 'TestJob', + path: 'TestJob/TestJob', + args: [], + }), + attempts: 0, + } + vi.spyOn(mockDb.backgroundJob, 'findFirst').mockReturnValue(mockJob) + const updateSpy = vi + .spyOn(mockDb.backgroundJob, 'updateMany') + .mockReturnValue({ count: 1 }) + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + await adapter.find({ + processName: 'test', + maxRuntime: 1000, + queues: ['default'], + }) + + expect(updateSpy).toHaveBeenCalledWith( + expect.objectContaining({ + data: expect.objectContaining({ attempts: 1 }), + }), + ) + }) + + it('locks the job for the current process', async () => { + const mockJob = { + id: 1, + attempts: 0, + handler: JSON.stringify({ + name: 'TestJob', + path: 'TestJob/TestJob', + args: [], + }), + } + vi.spyOn(mockDb.backgroundJob, 'findFirst').mockReturnValue(mockJob) + const updateSpy = vi + .spyOn(mockDb.backgroundJob, 'updateMany') + .mockReturnValue({ count: 1 }) + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + await adapter.find({ + processName: 'test-process', + maxRuntime: 1000, + queues: ['default'], + }) + + expect(updateSpy).toHaveBeenCalledWith( + expect.objectContaining({ + data: expect.objectContaining({ lockedBy: 'test-process' }), + }), + ) + }) + + it('locks the job with a current timestamp', async () => { + const mockJob = { + id: 1, + attempts: 0, + handler: JSON.stringify({ + name: 'TestJob', + path: 'TestJob/TestJob', + args: [], + }), + } + vi.spyOn(mockDb.backgroundJob, 'findFirst').mockReturnValue(mockJob) + const updateSpy = vi + .spyOn(mockDb.backgroundJob, 'updateMany') + .mockReturnValue({ count: 1 }) + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + await adapter.find({ + processName: 'test-process', + maxRuntime: 1000, + queues: ['default'], + }) + + expect(updateSpy).toHaveBeenCalledWith( + expect.objectContaining({ + data: expect.objectContaining({ lockedAt: new Date() }), + }), + ) + }) +}) + +const mockPrismaJob = { + id: 1, + handler: '', + attempts: 10, + runAt: new Date(), + lockedAt: new Date(), + lockedBy: 'test-process', + lastError: null, + failedAt: null, + createdAt: new Date(), + updatedAt: new Date(), + name: 'TestJob', + path: 'TestJob/TestJob', + args: [], +} + +describe('success()', () => { + it('deletes the job from the DB if option set', async () => { + const spy = vi.spyOn(mockDb.backgroundJob, 'delete') + const adapter = 
new PrismaAdapter({ + db: mockDb, + logger: mockLogger, + }) + await adapter.success({ job: mockPrismaJob, deleteJob: true }) + + expect(spy).toHaveBeenCalledWith({ where: { id: 1 } }) + }) + + it('updates the job if option not set', async () => { + const spy = vi.spyOn(mockDb.backgroundJob, 'update') + const adapter = new PrismaAdapter({ + db: mockDb, + logger: mockLogger, + }) + await adapter.success({ job: mockPrismaJob, deleteJob: false }) + + expect(spy).toHaveBeenCalledWith({ + where: { id: mockPrismaJob.id }, + data: { + lockedAt: null, + lockedBy: null, + lastError: null, + runAt: null, + }, + }) + }) +}) + +describe('error()', () => { + it('updates the job by id', async () => { + const spy = vi.spyOn(mockDb.backgroundJob, 'update') + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + await adapter.error({ job: mockPrismaJob, error: new Error('test error') }) + + expect(spy).toHaveBeenCalledWith( + expect.objectContaining({ where: { id: 1 } }), + ) + }) + + it('clears the lock fields', async () => { + const spy = vi.spyOn(mockDb.backgroundJob, 'update') + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + await adapter.error({ job: mockPrismaJob, error: new Error('test error') }) + + expect(spy).toHaveBeenCalledWith( + expect.objectContaining({ + data: expect.objectContaining({ lockedAt: null, lockedBy: null }), + }), + ) + }) + + it('reschedules the job at a designated backoff time', async () => { + const spy = vi.spyOn(mockDb.backgroundJob, 'update') + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + await adapter.error({ job: mockPrismaJob, error: new Error('test error') }) + + expect(spy).toHaveBeenCalledWith( + expect.objectContaining({ + data: expect.objectContaining({ + runAt: new Date(new Date().getTime() + 1000 * 10 ** 4), + }), + }), + ) + }) + + it('records the error', async () => { + const spy = vi.spyOn(mockDb.backgroundJob, 'update') + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + await adapter.error({ job: mockPrismaJob, error: new Error('test error') }) + + expect(spy).toHaveBeenCalledWith( + expect.objectContaining({ + data: expect.objectContaining({ + lastError: expect.stringContaining('test error'), + }), + }), + ) + }) +}) + +describe('failure()', () => { + it('marks the job as failed if max attempts reached', async () => { + const spy = vi.spyOn(mockDb.backgroundJob, 'update') + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + await adapter.failure({ job: mockPrismaJob, deleteJob: false }) + + expect(spy).toHaveBeenCalledWith( + expect.objectContaining({ + data: expect.objectContaining({ + failedAt: new Date(), + }), + }), + ) + }) + + it('deletes the job if option is set', async () => { + const spy = vi.spyOn(mockDb.backgroundJob, 'delete') + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + await adapter.failure({ job: mockPrismaJob, deleteJob: true }) + + expect(spy).toHaveBeenCalledWith({ where: { id: 1 } }) + }) +}) + +describe('clear()', () => { + it('deletes all jobs from the DB', async () => { + const spy = vi.spyOn(mockDb.backgroundJob, 'deleteMany') + + const adapter = new PrismaAdapter({ db: mockDb, logger: mockLogger }) + await adapter.clear() + + expect(spy).toHaveBeenCalledOnce() + }) +}) + +describe('backoffMilliseconds()', () => { + it('returns the number of milliseconds to wait for the next run', () => { + expect( + new PrismaAdapter({ db: mockDb, logger: mockLogger }).backoffMilliseconds( + 0, + ), + 
).toEqual(0) + expect( + new PrismaAdapter({ db: mockDb, logger: mockLogger }).backoffMilliseconds( + 1, + ), + ).toEqual(1000) + expect( + new PrismaAdapter({ db: mockDb, logger: mockLogger }).backoffMilliseconds( + 2, + ), + ).toEqual(16000) + expect( + new PrismaAdapter({ db: mockDb, logger: mockLogger }).backoffMilliseconds( + 3, + ), + ).toEqual(81000) + expect( + new PrismaAdapter({ db: mockDb, logger: mockLogger }).backoffMilliseconds( + 20, + ), + ).toEqual(160000000) + }) +}) diff --git a/packages/jobs/src/adapters/PrismaAdapter/errors.ts b/packages/jobs/src/adapters/PrismaAdapter/errors.ts new file mode 100644 index 000000000000..9a9fc95f1640 --- /dev/null +++ b/packages/jobs/src/adapters/PrismaAdapter/errors.ts @@ -0,0 +1,8 @@ +import { RedwoodJobError } from '../../errors.js' + +// Thrown when a given model name isn't actually available in the PrismaClient +export class ModelNameError extends RedwoodJobError { + constructor(name: string) { + super(`Model \`${name}\` not found in PrismaClient`) + } +} diff --git a/packages/jobs/src/bins/__tests__/rw-jobs-worker.test.ts b/packages/jobs/src/bins/__tests__/rw-jobs-worker.test.ts new file mode 100644 index 000000000000..90ed66b351e4 --- /dev/null +++ b/packages/jobs/src/bins/__tests__/rw-jobs-worker.test.ts @@ -0,0 +1,111 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +import { MockAdapter, mockLogger } from '../../core/__tests__/mocks.js' +import { JobManager } from '../../core/JobManager.js' +import { Worker } from '../../core/Worker.js' +import { getWorker, processName } from '../rw-jobs-worker.js' + +vi.mock('@redwoodjs/cli-helpers/loadEnvFiles', () => { + return { + loadEnvFiles: () => {}, + } +}) + +const mocks = vi.hoisted(() => { + return { + loadJobsManager: vi.fn(), + } +}) + +vi.mock('../../loaders.js', () => { + return { + loadJobsManager: mocks.loadJobsManager, + } +}) + +describe('processName', () => { + it('sets the process title for a single queue', () => { + const title = processName({ id: 1, queues: 'default' }) + + expect(title).toEqual('rw-jobs-worker.default.1') + }) + + it('sets the process title for an array of queues', () => { + const title = processName({ id: 1, queues: ['default', 'email'] }) + + expect(title).toEqual('rw-jobs-worker.default-email.1') + }) +}) + +describe('getWorker', () => { + beforeEach(() => { + vi.resetAllMocks() + }) + + it('returns an instance of Worker', async () => { + mocks.loadJobsManager.mockImplementation( + () => + new JobManager({ + adapters: { + test: new MockAdapter(), + }, + logger: mockLogger, + queues: ['default'], + workers: [ + { + adapter: 'test', + logger: mockLogger, + queue: '*', + count: 1, + }, + ], + }), + ) + + const worker = await getWorker({ + index: 0, + id: 0, + workoff: false, + clear: false, + }) + + expect(worker).toBeInstanceOf(Worker) + }) + + it('calls getWorker on the manager with the proper values', async () => { + const mockAdapter = new MockAdapter() + mocks.loadJobsManager.mockImplementation( + () => + new JobManager({ + adapters: { + test: mockAdapter, + }, + logger: mockLogger, + queues: ['default'], + workers: [ + { + adapter: 'test', + logger: mockLogger, + queue: '*', + count: 1, + }, + ], + }), + ) + const spy = vi.spyOn(JobManager.prototype, 'createWorker') + + await getWorker({ + index: 0, + id: 0, + workoff: false, + clear: false, + }) + + expect(spy).toHaveBeenCalledWith({ + index: 0, + workoff: false, + clear: false, + processName: 'rw-jobs-worker.*.0', + }) + }) +}) diff --git 
a/packages/jobs/src/bins/__tests__/rw-jobs.test.ts b/packages/jobs/src/bins/__tests__/rw-jobs.test.ts new file mode 100644 index 000000000000..9129c778200c --- /dev/null +++ b/packages/jobs/src/bins/__tests__/rw-jobs.test.ts @@ -0,0 +1,125 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +import { mockLogger } from '../../core/__tests__/mocks.js' +import { buildNumWorkers, startWorkers } from '../rw-jobs.js' + +vi.mock('@redwoodjs/cli-helpers/loadEnvFiles', () => { + return { + loadEnvFiles: () => {}, + } +}) + +const mocks = vi.hoisted(() => { + return { + fork: vi.fn(), + } +}) + +vi.mock('node:child_process', () => { + return { + fork: mocks.fork, + } +}) + +describe('buildNumWorkers()', () => { + it('turns a single worker config into an array of arrays', () => { + const config = [ + { + count: 1, + }, + ] + + const result = buildNumWorkers(config) + + expect(result).toEqual([[0, 0]]) + }) + + it('turns a single worker config with more than 1 count an array of arrays', () => { + const config = [ + { + count: 2, + }, + ] + + const result = buildNumWorkers(config) + + expect(result).toEqual([ + [0, 0], + [0, 1], + ]) + }) + + it('turns multiple worker configs into an array of arrays', () => { + const config = [ + { + count: 2, + }, + { + count: 3, + }, + ] + + const result = buildNumWorkers(config) + + expect(result).toEqual([ + [0, 0], + [0, 1], + [1, 0], + [1, 1], + [1, 2], + ]) + }) +}) + +describe('startWorkers()', () => { + beforeEach(() => { + vi.resetAllMocks() + }) + + it('forks a single worker', () => { + const mockWorker = { + on: () => {}, + } + mocks.fork.mockImplementation(() => mockWorker) + + startWorkers({ numWorkers: [[0, 0]], logger: mockLogger }) + + // single worker only + expect(mocks.fork).toHaveBeenCalledWith( + expect.stringContaining('rw-jobs-worker.js'), + ['--index', '0', '--id', '0'], + expect.objectContaining({ + detached: false, + stdio: 'inherit', + }), + ) + }) + + it('forks multiple workers', () => { + const mockWorker = { + on: () => {}, + } + mocks.fork.mockImplementation(() => mockWorker) + + startWorkers({ + numWorkers: [ + [0, 0], + [0, 1], + ], + logger: mockLogger, + }) + + // first worker + expect(mocks.fork).toHaveBeenCalledWith( + expect.stringContaining('rw-jobs-worker.js'), + ['--index', '0', '--id', '0'], + expect.any(Object), + ) + // second worker + expect(mocks.fork).toHaveBeenCalledWith( + expect.stringContaining('rw-jobs-worker.js'), + ['--index', '0', '--id', '1'], + expect.any(Object), + ) + }) +}) diff --git a/packages/jobs/src/bins/rw-jobs-worker.ts b/packages/jobs/src/bins/rw-jobs-worker.ts new file mode 100755 index 000000000000..f313ef5890e2 --- /dev/null +++ b/packages/jobs/src/bins/rw-jobs-worker.ts @@ -0,0 +1,133 @@ +#!/usr/bin/env node + +// The process that actually starts an instance of Worker to process jobs. +// Can be run independently with `yarn rw-jobs-worker` but by default is forked +// by `yarn rw-jobs` and either monitored, or detached to run independently. 
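+//
+// For example, the invocation below (using the flags defined by the yargs
+// options in this file) starts the first worker of the first entry in the
+// exported `workers` config array:
+//
+//   yarn rw-jobs-worker --index 0 --id 0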
+import process from 'node:process'
+
+import { hideBin } from 'yargs/helpers'
+import yargs from 'yargs/yargs'
+
+import { loadEnvFiles } from '@redwoodjs/cli-helpers/loadEnvFiles'
+
+import { PROCESS_TITLE_PREFIX } from '../consts.js'
+import type { Worker } from '../core/Worker.js'
+import { WorkerConfigIndexNotFoundError } from '../errors.js'
+import { loadJobsManager } from '../loaders.js'
+
+loadEnvFiles()
+
+const parseArgs = (argv: string[]) => {
+  return yargs(hideBin(argv))
+    .usage(
+      'Starts a single RedwoodJob worker to process background jobs\n\nUsage: $0 [options]',
+    )
+    .option('index', {
+      type: 'number',
+      required: true,
+      description:
+        'The index of the `workers` array from the exported `jobs` config to use to configure this worker',
+    })
+    .option('id', {
+      type: 'number',
+      required: true,
+      description:
+        'The worker count id to identify this worker, i.e. if you had `count: 2` in your worker config, you would have two workers with ids 0 and 1',
+    })
+    .option('workoff', {
+      type: 'boolean',
+      default: false,
+      description: 'Work off all jobs in the queue(s) and exit',
+    })
+    .option('clear', {
+      type: 'boolean',
+      default: false,
+      description: 'Remove all jobs in all queues and exit',
+    })
+    .help().argv
+}
+
+export const processName = ({
+  id,
+  queues,
+}: {
+  id: number
+  queues: string | string[]
+}) => {
+  return `${PROCESS_TITLE_PREFIX}.${[queues].flat().join('-')}.${id}`
+}
+
+const setupSignals = ({ worker }: { worker: Worker }) => {
+  // If the parent itself receives a ctrl-c it'll pass that to the workers.
+  // Workers will exit gracefully by setting `forever` to `false`, which tells
+  // them not to pick up a new job when done with the current one.
+  process.on('SIGINT', () => {
+    worker.logger.warn(
+      `[${process.title}] SIGINT received at ${new Date().toISOString()}, finishing work...`,
+    )
+    worker.forever = false
+  })
+
+  // If the parent itself receives a ctrl-c more than once it'll send SIGTERM
+  // instead, in which case we exit immediately no matter what state the worker
+  // is in.
+  process.on('SIGTERM', () => {
+    worker.logger.warn(
+      `[${process.title}] SIGTERM received at ${new Date().toISOString()}, exiting now!`,
+    )
+    process.exit(0)
+  })
+}
+
+export const getWorker = async ({
+  index,
+  id,
+  clear,
+  workoff,
+}: {
+  index: number
+  id: number
+  clear: boolean
+  workoff: boolean
+}) => {
+  let manager
+
+  try {
+    manager = await loadJobsManager()
+  } catch (e) {
+    console.error(e)
+    process.exit(1)
+  }
+
+  const workerConfig = manager.workers[index]
+  if (!workerConfig) {
+    throw new WorkerConfigIndexNotFoundError(index)
+  }
+
+  return manager.createWorker({
+    index,
+    clear,
+    workoff,
+    processName: processName({ id, queues: workerConfig.queue }),
+  })
+}
+
+const main = async () => {
+  const { index, id, clear, workoff } = await parseArgs(process.argv)
+
+  const worker = await getWorker({ index, id, clear, workoff })
+
+  process.title = processName({ id, queues: worker.queues })
+
+  worker.run().then(() => {
+    worker.logger.info(`[${process.title}] Worker finished, shutting down.`)
+    process.exit(0)
+  })
+
+  setupSignals({ worker })
+}
+
+// Don't actually run the worker if we're in a test environment
+if (process.env.NODE_ENV !== 'test') {
+  main()
+}
diff --git a/packages/jobs/src/bins/rw-jobs.ts b/packages/jobs/src/bins/rw-jobs.ts
new file mode 100755
index 000000000000..6f19327b05f8
--- /dev/null
+++ b/packages/jobs/src/bins/rw-jobs.ts
@@ -0,0 +1,327 @@
+#!/usr/bin/env node
+
+// Coordinates the worker processes: running attached in [work] mode or
+// detaching in [start] mode.
+
+import type { ChildProcess } from 'node:child_process'
+import { fork, exec } from 'node:child_process'
+import console from 'node:console'
+import path from 'node:path'
+import process from 'node:process'
+import { setTimeout } from 'node:timers'
+
+import { hideBin } from 'yargs/helpers'
+import yargs from 'yargs/yargs'
+
+import { loadEnvFiles } from '@redwoodjs/cli-helpers/loadEnvFiles'
+
+import { DEFAULT_LOGGER, PROCESS_TITLE_PREFIX } from '../consts.js'
+import { loadJobsManager } from '../loaders.js'
+import type { Adapters, BasicLogger, WorkerConfig } from '../types.js'
+
+export type NumWorkersConfig = [number, number][]
+
+loadEnvFiles()
+
+process.title = 'rw-jobs'
+
+const parseArgs = (argv: string[]) => {
+  const commandString = hideBin(argv)
+
+  if (commandString.length === 1 && commandString[0] === 'jobs') {
+    commandString.shift()
+  }
+
+  const parsed: Record<string, any> = yargs(commandString)
+    .usage(
+      'Starts the RedwoodJob runner to process background jobs\n\nUsage: rw jobs <command> [options]',
+    )
+    .command('work', 'Start a worker and process jobs')
+    .command('workoff', 'Start a worker and exit after all jobs processed')
+    .command('start', 'Start workers in daemon mode')
+    .command('stop', 'Stop any daemonized job workers')
+    .command('restart', 'Stop and start any daemonized job workers')
+    .command('clear', 'Clear the job queue')
+    .demandCommand(1, 'You must specify a mode to start in')
+    .example(
+      'rw jobs work',
+      'Start the job workers using the job config and work on jobs until manually stopped',
+    )
+    .example(
+      'rw jobs start',
+      'Start the job workers using the job config and detach, running in daemon mode',
+    )
+    .help()
+    .parse(commandString, (_err: any, _argv: any, output: any) => {
+      if (output) {
+        const newOutput = output.replaceAll('rw-jobs.js', 'rw jobs')
+        console.log(newOutput)
+      }
+    })
+
+  return { command: parsed._[0] }
+}
+
+// Builds up an array of arrays, with the worker config index and id based on
+// how many workers should use this config. For the following config:
+//
+// {
+//   workers: [
+//     {
+//       adapter: 'prisma',
+//       count: 2,
+//       queue: 'default',
+//     },
+//     {
+//       adapter: 'prisma',
+//       count: 1,
+//       queue: 'email',
+//     },
+//   ]
+// }
+//
+// The output would be:
+//
+// [
+//   [0, 0], // first array element, first worker
+//   [0, 1], // first array element, second worker
+//   [1, 0], // second array element, first worker
+// ]
+export const buildNumWorkers = (config: any) => {
+  const workers: NumWorkersConfig = []
+
+  config.map((worker: any, index: number) => {
+    for (let id = 0; id < worker.count; id++) {
+      workers.push([index, id])
+    }
+  })
+
+  return workers
+}
+
+export const startWorkers = ({
+  numWorkers,
+  detach = false,
+  workoff = false,
+  logger,
+}: {
+  numWorkers: NumWorkersConfig
+  detach?: boolean
+  workoff?: boolean
+  logger: BasicLogger
+}) => {
+  logger.warn(`Starting ${numWorkers.length} worker(s)...`)
+
+  return numWorkers.map(([index, id]) => {
+    // list of args to send to the forked worker script
+    const workerArgs: string[] = []
+    workerArgs.push('--index', index.toString())
+    workerArgs.push('--id', id.toString())
+
+    if (workoff) {
+      workerArgs.push('--workoff')
+    }
+
+    // fork the worker process
+    const worker = fork(
+      path.join(import.meta.dirname, 'rw-jobs-worker.js'),
+      workerArgs,
+      {
+        detached: detach,
+        stdio: detach ? 'ignore' : 'inherit',
+        env: process.env,
+      },
+    )
+
+    if (detach) {
+      worker.unref()
+    } else {
+      // children stay attached so watch for their exit before exiting parent
+      worker.on('exit', (_code) => {})
+    }
+
+    return worker
+  })
+}
+
+// TODO add support for stopping with SIGTERM or SIGKILL?
+const stopWorkers = async ({
+  numWorkers,
+  signal = 'SIGINT',
+  logger,
+}: {
+  numWorkers: NumWorkersConfig
+  signal: string
+  logger: BasicLogger
+}) => {
+  logger.warn(
+    `Stopping ${numWorkers.length} worker(s) gracefully (${signal})...`,
+  )
+
+  const processIds = await findWorkerProcesses()
+
+  if (processIds.length === 0) {
+    logger.warn(`No running workers found.`)
+    return
+  }
+
+  for (const processId of processIds) {
+    logger.info(`Stopping process id ${processId}...`)
+    process.kill(processId, signal)
+
+    // wait for the process to actually exit before going to next iteration
+    while ((await findWorkerProcesses(processId)).length) {
+      await new Promise((resolve) => setTimeout(resolve, 250))
+    }
+  }
+}
+
+const clearQueue = ({ logger }: { logger: BasicLogger }) => {
+  logger.warn(`Starting worker to clear job queue...`)
+  fork(path.join(import.meta.dirname, 'rw-jobs-worker.js'), ['--clear'])
+}
+
+const signalSetup = ({
+  workers,
+  logger,
+}: {
+  workers: ChildProcess[]
+  logger: BasicLogger
+}) => {
+  // Keep track of how many times the user has pressed ctrl-c
+  let sigtermCount = 0
+
+  // If the parent receives a ctrl-c, tell each worker to gracefully exit.
+  // If the parent receives a second ctrl-c, exit immediately.
+  process.on('SIGINT', () => {
+    sigtermCount++
+    let message =
+      'SIGINT received: shutting down workers gracefully (press Ctrl-C again to exit immediately)...'
+
+    if (sigtermCount > 1) {
+      message = 'SIGINT received again, exiting immediately...'
+    }
+
+    logger.info(message)
+
+    workers.forEach((worker) => {
+      if (sigtermCount > 1) {
+        worker.kill('SIGTERM')
+      } else {
+        worker.kill('SIGINT')
+      }
+    })
+  })
+}
+
+// Find the process id of a worker by its title
+const findWorkerProcesses = async (id?: number): Promise<number[]> => {
+  return new Promise(function (resolve, reject) {
+    const plat = process.platform
+    const cmd =
+      plat === 'win32'
+        ? 'tasklist'
+        : plat === 'darwin'
+          ? 'ps -ax | grep ' + PROCESS_TITLE_PREFIX
+          : plat === 'linux'
+            ? 'ps -A'
+            : ''
+    if (cmd === '') {
+      return resolve([])
+    }
+    exec(cmd, function (err, stdout) {
+      if (err) {
+        return reject(err)
+      }
+
+      const list = stdout.trim().split('\n')
+      const matches = list.filter((line) => {
+        if (plat === 'darwin' || plat === 'linux') {
+          return !line.match('grep')
+        }
+        return true
+      })
+
+      // no job workers running
+      if (matches.length === 0) {
+        return resolve([])
+      }
+
+      const pids = matches.map((line) => parseInt(line.split(' ')[0]))
+
+      if (id) {
+        // will return the single job worker process ID if still running
+        resolve(pids.filter((pid) => pid === id))
+      } else {
+        // return all job worker process IDs
+        resolve(pids)
+      }
+    })
+  })
+}
+
+const main = async () => {
+  const { command } = parseArgs(process.argv)
+  let jobsConfig
+
+  try {
+    jobsConfig = await loadJobsManager()
+  } catch (e) {
+    console.error(e)
+    process.exit(1)
+  }
+
+  const workerConfig: WorkerConfig[] = jobsConfig.workers
+  const numWorkers = buildNumWorkers(workerConfig)
+  const logger = jobsConfig.logger ?? DEFAULT_LOGGER
+
+  logger.warn(`Starting RedwoodJob Runner at ${new Date().toISOString()}...`)
+
+  switch (command) {
+    case 'start':
+      startWorkers({
+        numWorkers,
+        detach: true,
+        logger,
+      })
+      return process.exit(0)
+    case 'stop':
+      return await stopWorkers({
+        numWorkers,
+        signal: 'SIGINT',
+        logger,
+      })
+    case 'restart':
+      await stopWorkers({ numWorkers, signal: 'SIGINT', logger })
+      startWorkers({
+        numWorkers,
+        detach: true,
+        logger,
+      })
+      return process.exit(0)
+    case 'work':
+      return signalSetup({
+        workers: startWorkers({
+          numWorkers,
+          logger,
+        }),
+        logger,
+      })
+    case 'workoff':
+      return signalSetup({
+        workers: startWorkers({
+          numWorkers,
+          workoff: true,
+          logger,
+        }),
+        logger,
+      })
+    case 'clear':
+      return clearQueue({ logger })
+  }
+}
+
+// Don't actually run the worker if we're in a test environment
+if (process.env.NODE_ENV !== 'test') {
+  main()
+}
diff --git a/packages/jobs/src/consts.ts b/packages/jobs/src/consts.ts
new file mode 100644
index 000000000000..d9f33b67301d
--- /dev/null
+++ b/packages/jobs/src/consts.ts
@@ -0,0 +1,29 @@
+import console from 'node:console'
+
+export const DEFAULT_MAX_ATTEMPTS = 24
+/** 4 hours in seconds */
+export const DEFAULT_MAX_RUNTIME = 14_400
+/** 5 seconds */
+export const DEFAULT_SLEEP_DELAY = 5
+
+export const DEFAULT_DELETE_SUCCESSFUL_JOBS = true
+export const DEFAULT_DELETE_FAILED_JOBS = false
+export const DEFAULT_LOGGER = console
+export const DEFAULT_QUEUE = 'default'
+export const DEFAULT_WORK_QUEUE = '*'
+export const DEFAULT_PRIORITY = 50
+export const DEFAULT_WAIT = 0
+export const DEFAULT_WAIT_UNTIL = null
+export const PROCESS_TITLE_PREFIX = 'rw-jobs-worker'
+export const DEFAULT_MODEL_NAME = 'BackgroundJob'
+
+/**
+ * The name of the exported variable from the jobs config file that contains
+ * the adapter
+ */
+export const DEFAULT_ADAPTER_NAME = 'adapter'
+/**
+ * The name of the exported variable from the jobs config file that contains
+ * the logger
+ */
+export const DEFAULT_LOGGER_NAME = 'logger'
diff --git a/packages/jobs/src/core/Executor.ts b/packages/jobs/src/core/Executor.ts
new file mode 100644
index 000000000000..51ae7bc040c2
--- /dev/null
+++ b/packages/jobs/src/core/Executor.ts
@@ -0,0 +1,96 @@
+// Used by the job runner to execute a job and track success or failure
+
+import type { BaseAdapter } from '../adapters/BaseAdapter/BaseAdapter.js'
+import {
+  DEFAULT_MAX_ATTEMPTS,
+  DEFAULT_DELETE_FAILED_JOBS,
+  DEFAULT_DELETE_SUCCESSFUL_JOBS,
+  DEFAULT_LOGGER,
+} from '../consts.js'
+import { AdapterRequiredError, JobRequiredError } from '../errors.js'
+import { loadJob } from '../loaders.js'
+import type { BaseJob, BasicLogger } from '../types.js'
+
+interface Options {
+  adapter: BaseAdapter
+  job: BaseJob
+  logger?: BasicLogger
+  maxAttempts?: number
+  deleteFailedJobs?: boolean
+  deleteSuccessfulJobs?: boolean
+}
+
+export const DEFAULTS = {
+  logger: DEFAULT_LOGGER,
+  maxAttempts: DEFAULT_MAX_ATTEMPTS,
+  deleteFailedJobs: DEFAULT_DELETE_FAILED_JOBS,
+  deleteSuccessfulJobs: DEFAULT_DELETE_SUCCESSFUL_JOBS,
+}
+
+export class Executor {
+  options: Required<Options>
+  adapter: Options['adapter']
+  logger: NonNullable<Options['logger']>
+  job: BaseJob
+  maxAttempts: NonNullable<Options['maxAttempts']>
+  deleteFailedJobs: NonNullable<Options['deleteFailedJobs']>
+  deleteSuccessfulJobs: NonNullable<Options['deleteSuccessfulJobs']>
+
+  constructor(options: Options) {
+    this.options = { ...DEFAULTS, ...options }
+
+    // validate that everything we need is available
+    if (!this.options.adapter) {
+      throw new AdapterRequiredError()
+    }
+    if (!this.options.job) {
+      throw new JobRequiredError()
+    }
+
+    this.adapter = this.options.adapter
+    this.logger = this.options.logger
+    this.job = this.options.job
+    this.maxAttempts = this.options.maxAttempts
+    this.deleteFailedJobs = this.options.deleteFailedJobs
+    this.deleteSuccessfulJobs = this.options.deleteSuccessfulJobs
+  }
+
+  get jobIdentifier() {
+    return `${this.job.id} (${this.job.path}:${this.job.name})`
+  }
+
+  async perform() {
+    this.logger.info(`[RedwoodJob] Started job ${this.jobIdentifier}`)
+
+    try {
+      const job = await loadJob({ name: this.job.name, path: this.job.path })
+      await job.perform(...this.job.args)
+
+      await this.adapter.success({
+        job: this.job,
+        deleteJob: this.deleteSuccessfulJobs,
+      })
+    } catch (error: any) {
+      this.logger.error(
+        `[RedwoodJob] Error in job ${this.jobIdentifier}: ${error.message}`,
+      )
+      this.logger.error(error.stack)
+
+      await this.adapter.error({
+        job: this.job,
+        error,
+      })
+
+      if (this.job.attempts >= this.maxAttempts) {
+        this.logger.warn(
+          this.job,
+          `[RedwoodJob] Failed job ${this.jobIdentifier}: reached max attempts (${this.maxAttempts})`,
+        )
+        await this.adapter.failure({
+          job: this.job,
+          deleteJob: this.deleteFailedJobs,
+        })
+      }
+    }
+  }
+}
diff --git a/packages/jobs/src/core/JobManager.ts b/packages/jobs/src/core/JobManager.ts
new file mode 100644
index 000000000000..927068b873f4
--- /dev/null
+++ b/packages/jobs/src/core/JobManager.ts
@@ -0,0 +1,85 @@
+import { AdapterNotFoundError } from '../errors.js'
+import type {
+  Adapters,
+  BasicLogger,
+  CreateSchedulerConfig,
+  Job,
+  JobDefinition,
+  JobManagerConfig,
+  ScheduleJobOptions,
+  WorkerConfig,
+} from '../types.js'
+
+import { Scheduler } from './Scheduler.js'
+import type { WorkerOptions } from './Worker.js'
+import { Worker } from './Worker.js'
+
+export interface CreateWorkerArgs {
+  index: number
+  workoff: WorkerOptions['workoff']
+  clear: WorkerOptions['clear']
+  processName: string
+}
+
+export class JobManager<
+  TAdapters extends Adapters,
+  TQueues extends string[],
+  TLogger extends BasicLogger,
+> {
+  adapters: TAdapters
+  queues: TQueues
+  logger: TLogger
+  workers: WorkerConfig[]
+
+  constructor(config: JobManagerConfig<TAdapters, TQueues, TLogger>) {
+    this.adapters = config.adapters
+    this.queues = config.queues
+    this.logger = config.logger
+    this.workers = config.workers
+  }
+
+  createScheduler(schedulerConfig: CreateSchedulerConfig<TAdapters>) {
+    const scheduler = new Scheduler({
+      adapter: this.adapters[schedulerConfig.adapter],
+      logger: this.logger,
+    })
+
+    return <T extends Job<TQueues, any[]>>(
+      job: T,
+      jobArgs?: Parameters<T['perform']>,
+      jobOptions?: ScheduleJobOptions,
+    ) => {
+      return scheduler.schedule({ job, jobArgs, jobOptions })
+    }
+  }
+
+  createJob<TArgs extends unknown[]>(
+    jobDefinition: JobDefinition<TQueues, TArgs>,
+  ): Job<TQueues, TArgs> {
+    // The cast is necessary because the JobDefinition type lacks the `name` and
+    // `path` properties that are required by the Job type. These properties are
+    // added to the job at build time by a plugin in the build process.
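+    //
+    // A hypothetical definition, for illustration only:
+    //
+    //   const SampleJob = jobs.createJob({
+    //     queue: 'default',
+    //     perform: async (arg: string) => console.log(arg),
+    //   })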
+    return jobDefinition as Job<TQueues, TArgs>
+  }
+
+  createWorker({ index, workoff, clear, processName }: CreateWorkerArgs) {
+    const config = this.workers[index]
+    const adapter = this.adapters[config.adapter]
+    if (!adapter) {
+      throw new AdapterNotFoundError(config.adapter.toString())
+    }
+
+    return new Worker({
+      adapter: this.adapters[config.adapter],
+      logger: config.logger || this.logger,
+      maxAttempts: config.maxAttempts,
+      maxRuntime: config.maxRuntime,
+      sleepDelay: config.sleepDelay,
+      deleteFailedJobs: config.deleteFailedJobs,
+      processName,
+      queues: [config.queue].flat(),
+      workoff,
+      clear,
+    })
+  }
+}
diff --git a/packages/jobs/src/core/Scheduler.ts b/packages/jobs/src/core/Scheduler.ts
new file mode 100644
index 000000000000..d6eb0e971bd6
--- /dev/null
+++ b/packages/jobs/src/core/Scheduler.ts
@@ -0,0 +1,93 @@
+import type {
+  BaseAdapter,
+  SchedulePayload,
+} from '../adapters/BaseAdapter/BaseAdapter.js'
+import {
+  DEFAULT_LOGGER,
+  DEFAULT_PRIORITY,
+  DEFAULT_WAIT,
+  DEFAULT_WAIT_UNTIL,
+} from '../consts.js'
+import {
+  AdapterNotConfiguredError,
+  QueueNotDefinedError,
+  SchedulingError,
+} from '../errors.js'
+import type { BasicLogger, Job, ScheduleJobOptions } from '../types.js'
+
+interface SchedulerConfig<TAdapter extends BaseAdapter> {
+  adapter: TAdapter
+  logger?: BasicLogger
+}
+
+export class Scheduler<TAdapter extends BaseAdapter> {
+  adapter: TAdapter
+  logger: NonNullable<SchedulerConfig<TAdapter>['logger']>
+
+  constructor({ adapter, logger }: SchedulerConfig<TAdapter>) {
+    this.logger = logger ?? DEFAULT_LOGGER
+    this.adapter = adapter
+
+    if (!this.adapter) {
+      throw new AdapterNotConfiguredError()
+    }
+  }
+
+  computeRunAt({ wait, waitUntil }: { wait: number; waitUntil: Date | null }) {
+    if (wait && wait > 0) {
+      return new Date(Date.now() + wait * 1000)
+    } else if (waitUntil) {
+      return waitUntil
+    } else {
+      return new Date()
+    }
+  }
+
+  buildPayload<T extends Job<string[], unknown[]>>(
+    job: T,
+    args?: Parameters<T['perform']>,
+    options?: ScheduleJobOptions,
+  ): SchedulePayload {
+    const queue = job.queue
+    const priority = job.priority ?? DEFAULT_PRIORITY
+    const wait = options?.wait ?? DEFAULT_WAIT
+    const waitUntil = options?.waitUntil ?? DEFAULT_WAIT_UNTIL
+
+    if (!queue) {
+      throw new QueueNotDefinedError()
+    }
+
+    return {
+      name: job.name,
+      path: job.path,
+      args: args ?? [],
+      runAt: this.computeRunAt({ wait, waitUntil }),
+      queue: queue,
+      priority: priority,
+    }
+  }
+
+  async schedule<T extends Job<string[], unknown[]>>({
+    job,
+    jobArgs,
+    jobOptions,
+  }: {
+    job: T
+    jobArgs?: Parameters<T['perform']>
+    jobOptions?: ScheduleJobOptions
+  }) {
+    const payload = this.buildPayload(job, jobArgs, jobOptions)
+
+    this.logger.info(payload, `[RedwoodJob] Scheduling ${job.name}`)
+
+    try {
+      await this.adapter.schedule(payload)
+      return true
+    } catch (e) {
+      throw new SchedulingError(
+        `[RedwoodJob] Exception when scheduling ${payload.name}`,
+        e as Error,
+      )
+    }
+  }
+}
diff --git a/packages/jobs/src/core/Worker.ts b/packages/jobs/src/core/Worker.ts
new file mode 100644
index 000000000000..1958caabf8d3
--- /dev/null
+++ b/packages/jobs/src/core/Worker.ts
@@ -0,0 +1,191 @@
+// Used by the job runner to find the next job to run and invoke the Executor
+
+import { setTimeout } from 'node:timers'
+
+import type { BaseAdapter } from '../adapters/BaseAdapter/BaseAdapter.js'
+import {
+  DEFAULT_DELETE_FAILED_JOBS,
+  DEFAULT_DELETE_SUCCESSFUL_JOBS,
+  DEFAULT_LOGGER,
+  DEFAULT_MAX_ATTEMPTS,
+  DEFAULT_MAX_RUNTIME,
+  DEFAULT_SLEEP_DELAY,
+} from '../consts.js'
+import { AdapterRequiredError, QueuesRequiredError } from '../errors.js'
+import type { BasicLogger } from '../types.js'
+
+import { Executor } from './Executor.js'
+
+export interface WorkerOptions {
+  // required
+  adapter: BaseAdapter
+  processName: string
+  queues: string[]
+  // optional
+  logger?: BasicLogger
+  clear?: boolean
+  maxAttempts?: number
+  maxRuntime?: number
+  deleteSuccessfulJobs?: boolean
+  deleteFailedJobs?: boolean
+  sleepDelay?: number
+  workoff?: boolean
+  // Makes testing much easier: we can set to false to NOT run in an infinite
+  // loop by default during tests
+  forever?: boolean
+}
+
+type CompleteOptions = Required<WorkerOptions>
+
+const DEFAULT_OPTIONS = {
+  logger: DEFAULT_LOGGER,
+  clear: false,
+  maxAttempts: DEFAULT_MAX_ATTEMPTS,
+  maxRuntime: DEFAULT_MAX_RUNTIME,
+  deleteSuccessfulJobs: DEFAULT_DELETE_SUCCESSFUL_JOBS,
+  deleteFailedJobs: DEFAULT_DELETE_FAILED_JOBS,
+  sleepDelay: DEFAULT_SLEEP_DELAY,
+  workoff: false,
+  forever: true,
+}
+
+export class Worker {
+  options: CompleteOptions
+  adapter: CompleteOptions['adapter']
+  logger: CompleteOptions['logger']
+  clear: CompleteOptions['clear']
+  processName: CompleteOptions['processName']
+  queues: CompleteOptions['queues']
+  maxAttempts: CompleteOptions['maxAttempts']
+  maxRuntime: CompleteOptions['maxRuntime']
+  deleteSuccessfulJobs: CompleteOptions['deleteSuccessfulJobs']
+  deleteFailedJobs: CompleteOptions['deleteFailedJobs']
+  sleepDelay: CompleteOptions['sleepDelay']
+  forever: CompleteOptions['forever']
+  workoff: CompleteOptions['workoff']
+  lastCheckTime: Date
+
+  constructor(options: WorkerOptions) {
+    this.options = { ...DEFAULT_OPTIONS, ...options }
+
+    if (!options?.adapter) {
+      throw new AdapterRequiredError()
+    }
+
+    if (!options?.queues || options.queues.length === 0) {
+      throw new QueuesRequiredError()
+    }
+
+    this.adapter = this.options.adapter
+    this.logger = this.options.logger
+
+    // if true, will clear the queue of all jobs and then exit
+    this.clear = this.options.clear
+
+    // used to set the `lockedBy` field in the database
+    this.processName = this.options.processName
+
+    // if not given a queue name then will work on jobs in any queue
+    this.queues = this.options.queues
+
+    // the maximum number of times to retry a failed job
+    this.maxAttempts = this.options.maxAttempts
+
+    // the maximum amount of time to let a job run in seconds
+    this.maxRuntime = this.options.maxRuntime
+
+    // whether to delete succeeded jobs from the database
+    this.deleteSuccessfulJobs = this.options.deleteSuccessfulJobs
+
+    // whether to delete failed jobs from the database after reaching maxAttempts
+    this.deleteFailedJobs = this.options.deleteFailedJobs
+
+    // the amount of time to wait in milliseconds between checking for jobs.
+    // the time it took to run a job is subtracted from this time, so this is a
+    // maximum wait time
+    this.sleepDelay = this.options.sleepDelay * 1000
+
+    // Set to `false` and the work loop will quit when the current job is done
+    // running (regardless of how many outstanding jobs there are to be worked
+    // on). The worker process will set this to `false` as soon as the user hits
+    // ctrl-c so any current job will complete before exiting.
+    this.forever = this.options.forever
+
+    // Set to `true` if the work loop should run through all *available* jobs
+    // and then quit. Serves a slightly different purpose than `forever`, which
+    // makes the runner exit immediately after the next loop, whereas `workoff`
+    // doesn't exit the loop until there are no more jobs to work on.
+    this.workoff = this.options.workoff
+
+    // keep track of the last time we checked for jobs
+    this.lastCheckTime = new Date()
+  }
+
+  // Workers run forever unless:
+  //   `this.forever` is set to `false` (the loop only runs once, then exits)
+  //   `this.workoff` is `true` (run all jobs in the queue, then exit)
+  run() {
+    if (this.clear) {
+      return this.#clearQueue()
+    } else {
+      return this.#work()
+    }
+  }
+
+  get queueNames() {
+    if (this.queues.length === 1 && this.queues[0] === '*') {
+      return 'all (*)'
+    } else {
+      return this.queues.join(', ')
+    }
+  }
+
+  async #clearQueue() {
+    return await this.adapter.clear()
+  }
+
+  async #work() {
+    do {
+      this.lastCheckTime = new Date()
+
+      this.logger.debug(
+        `[${this.processName}] Checking for jobs in ${this.queueNames} queues...`,
+      )
+
+      const job = await this.adapter.find({
+        processName: this.processName,
+        maxRuntime: this.maxRuntime,
+        queues: this.queues,
+      })
+
+      if (job) {
+        // TODO add timeout handling if runs for more than `this.maxRuntime`
+        // will need to run Executor in a separate process with a timeout
+        await new Executor({
+          adapter: this.adapter,
+          logger: this.logger,
+          job,
+          maxAttempts: this.maxAttempts,
+          deleteSuccessfulJobs: this.deleteSuccessfulJobs,
+          deleteFailedJobs: this.deleteFailedJobs,
+        }).perform()
+      } else if (this.workoff) {
+        // If there are no jobs and we're in workoff mode, we're done
+        break
+      }
+
+      // sleep if there were no jobs found, otherwise get back to work
+      if (!job && this.forever) {
+        const millsSinceLastCheck =
+          new Date().getTime() - this.lastCheckTime.getTime()
+        if (millsSinceLastCheck < this.sleepDelay) {
+          await this.#wait(this.sleepDelay - millsSinceLastCheck)
+        }
+      }
+    } while (this.forever)
+  }
+
+  #wait(ms: number) {
+    return new Promise((resolve) => setTimeout(resolve, ms))
+  }
+}
diff --git a/packages/jobs/src/core/__tests__/Executor.test.ts b/packages/jobs/src/core/__tests__/Executor.test.ts
new file mode 100644
index 000000000000..83dbd4162214
--- /dev/null
+++ b/packages/jobs/src/core/__tests__/Executor.test.ts
@@ -0,0 +1,182 @@
+import { beforeEach, describe, expect, vi, it } from 'vitest'
+
+import { DEFAULT_LOGGER } from '../../consts.js'
+import * as errors from '../../errors.js'
+import type { BaseJob } from '../../types.js'
+import { Executor } from '../Executor.js'
+
+import { MockAdapter, mockLogger } from './mocks.js'
+
+const mocks = vi.hoisted(() => {
+  return {
+    loadJob: vi.fn(),
+  }
+})
+
+vi.mock('../../loaders.js', () => {
+  return {
+    loadJob: mocks.loadJob,
+  }
+})
+
+describe('constructor', () => {
+  const mockAdapter = new MockAdapter()
+  const mockJob: BaseJob = {
+    id: 1,
+    name: 'mockJob',
+    path: 'mockJob/mockJob',
+    args: [],
+    attempts: 0,
+  }
+
+  it('saves options', () => {
+    const options = { adapter: mockAdapter, job: mockJob }
+    const executor = new Executor(options)
+
+    expect(executor.options).toEqual(expect.objectContaining(options))
+  })
+
+  it('extracts adapter from options to variable', () => {
+    const options = { adapter: mockAdapter, job: mockJob }
+    const executor = new Executor(options)
+
+    expect(executor.adapter).toEqual(mockAdapter)
+  })
+
+  it('extracts job from options to variable', () => {
+    const options = { adapter: mockAdapter, job: mockJob }
+    const executor = new Executor(options)
+
+    expect(executor.job).toEqual(mockJob)
+  })
+
+  it('extracts logger from options to variable', () => {
+    const options = {
+      adapter: mockAdapter,
+      job: mockJob,
+      logger: mockLogger,
+    }
+    const executor = new Executor(options)
+
+    expect(executor.logger).toEqual(mockLogger)
+  })
+
+  it('defaults logger if not provided', () => {
+    const options = { adapter: mockAdapter, job: mockJob }
+    const executor = new Executor(options)
+
+    expect(executor.logger).toEqual(DEFAULT_LOGGER)
+  })
+
+  it('throws AdapterRequiredError if adapter is not provided', () => {
+    const options = { job: mockJob }
+
+    // @ts-expect-error testing error case
+    expect(() => new Executor(options)).toThrow(errors.AdapterRequiredError)
+  })
+
+  it('throws JobRequiredError if job is not provided', () => {
+    const options = { adapter: mockAdapter }
+
+    // @ts-expect-error testing error case
+    expect(() => new Executor(options)).toThrow(errors.JobRequiredError)
+  })
+})
+
+describe('perform', () => {
+  beforeEach(() => {
+    vi.resetAllMocks()
+  })
+
+  it('invokes the `perform` method on the job class', async () => {
+    const mockAdapter = new MockAdapter()
+    const mockJob = {
+      id: 1,
+      name: 'TestJob',
+      path: 'TestJob/TestJob',
+      args: ['foo'],
+      attempts: 0,
+
+      perform: vi.fn(),
+    }
+
+    const options = {
+      adapter: mockAdapter,
+      logger: mockLogger,
+      job: mockJob,
+    }
+    const executor = new Executor(options)
+
+    // mock the `loadJob` loader to return the job mock
+    mocks.loadJob.mockImplementation(() => mockJob)
+
+    await executor.perform()
+
+    expect(mockJob.perform).toHaveBeenCalledWith('foo')
+  })
+
+  it('invokes the `success` method on the adapter when job successful', async () => {
+    const mockAdapter = new MockAdapter()
+    const mockJob = {
+      id: 1,
+      name: 'TestJob',
+      path: 'TestJob/TestJob',
+      args: ['foo'],
+      attempts: 0,
+
+      perform: vi.fn(),
+    }
+    const options = {
+      adapter: mockAdapter,
+      logger: mockLogger,
+      job: mockJob,
+    }
+    const executor = new Executor(options)
+
+    // spy on the success function of the adapter
+    const adapterSpy = vi.spyOn(mockAdapter, 'success')
+    // mock the `loadJob` loader to return the job mock
+    mocks.loadJob.mockImplementation(() => mockJob)
+
+    await executor.perform()
+
+    expect(adapterSpy).toHaveBeenCalledWith({
+      job: options.job,
+      deleteJob: true,
+    })
+  })
+
+  it('invokes the `error` method on the adapter when job fails', async () => {
+    const mockAdapter = new MockAdapter()
+    const mockError = new Error('mock error in the job perform method')
+    const mockJob = {
+      id: 1,
+      name: 'TestJob',
+      path: 'TestJob/TestJob',
+      args: ['foo'],
+      attempts: 0,
+ + perform: vi.fn(() => { + throw mockError + }), + } + const options = { + adapter: mockAdapter, + logger: mockLogger, + job: mockJob, + } + const executor = new Executor(options) + + // spy on the success function of the adapter + const adapterSpy = vi.spyOn(mockAdapter, 'error') + // mock the `loadJob` loader to return the job mock + mocks.loadJob.mockImplementation(() => mockJob) + + await executor.perform() + + expect(adapterSpy).toHaveBeenCalledWith({ + job: options.job, + error: mockError, + }) + }) +}) diff --git a/packages/jobs/src/core/__tests__/JobManager.test.ts b/packages/jobs/src/core/__tests__/JobManager.test.ts new file mode 100644 index 000000000000..6cfbd407324d --- /dev/null +++ b/packages/jobs/src/core/__tests__/JobManager.test.ts @@ -0,0 +1,153 @@ +import { describe, expect, vi, it, beforeEach } from 'vitest' + +import type { Job, JobDefinition } from '../../types.js' +import { JobManager } from '../JobManager.js' +import { Scheduler } from '../Scheduler.js' + +import { MockAdapter, mockLogger } from './mocks.js' + +vi.mock('../Scheduler') + +describe('constructor', () => { + const mockAdapter = new MockAdapter() + const adapters = { mock: mockAdapter } + const queues = ['queue'] + const logger = mockLogger + const workers = [ + { + adapter: 'mock' as const, + queue: '*', + count: 1, + }, + ] + + let manager: JobManager + + beforeEach(() => { + manager = new JobManager({ + adapters, + queues, + logger, + workers, + }) + }) + + it('saves adapters', () => { + expect(manager.adapters).toEqual({ mock: mockAdapter }) + }) + + it('saves queues', () => { + expect(manager.queues).toEqual(queues) + }) + + it('saves logger', () => { + expect(manager.logger).toEqual(logger) + }) + + it('saves workers', () => { + expect(manager.workers).toEqual(workers) + }) +}) + +describe('createScheduler()', () => { + beforeEach(() => { + vi.resetAllMocks() + }) + + const mockAdapter = new MockAdapter() + + it('returns a function', () => { + const manager = new JobManager({ + adapters: { + mock: mockAdapter, + }, + queues: [], + logger: mockLogger, + workers: [], + }) + + const scheduler = manager.createScheduler({ adapter: 'mock' }) + + expect(scheduler).toBeInstanceOf(Function) + }) + + it('initializes the scheduler with the correct adapter', () => { + const manager = new JobManager({ + adapters: { + mock: mockAdapter, + }, + queues: ['*'], + logger: mockLogger, + workers: [], + }) + manager.createScheduler({ adapter: 'mock', logger: mockLogger }) + + expect(Scheduler).toHaveBeenCalledWith( + expect.objectContaining({ adapter: mockAdapter }), + ) + }) + + it('initializes the scheduler with a logger', () => { + const manager = new JobManager({ + adapters: { + mock: mockAdapter, + }, + queues: [], + logger: mockLogger, + workers: [], + }) + manager.createScheduler({ adapter: 'mock', logger: mockLogger }) + + expect(Scheduler).toHaveBeenCalledWith( + expect.objectContaining({ logger: mockLogger }), + ) + }) + + it('calling the function invokes the schedule() method of the scheduler', () => { + const manager = new JobManager({ + adapters: { + mock: mockAdapter, + }, + queues: ['default'], + logger: mockLogger, + workers: [], + }) + const mockJob: Job = { + queue: 'default', + name: 'mockJob', + path: 'mockJob/mockJob', + + perform: vi.fn(), + } + const mockArgs = ['foo'] + const mockOptions = { wait: 300 } + const scheduler = manager.createScheduler({ adapter: 'mock' }) + + scheduler(mockJob, mockArgs, mockOptions) + + expect(Scheduler.prototype.schedule).toHaveBeenCalledWith({ + job: mockJob, + 
jobArgs: mockArgs, + jobOptions: mockOptions, + }) + }) +}) + +describe('createJob()', () => { + it('returns the same job description that was passed in', () => { + const manager = new JobManager({ + adapters: {}, + queues: ['default'], + logger: mockLogger, + workers: [], + }) + const jobDefinition: JobDefinition = { + queue: 'default', + perform: vi.fn(), + } + + const job = manager.createJob(jobDefinition) + + expect(job).toEqual(jobDefinition) + }) +}) diff --git a/packages/jobs/src/core/__tests__/Scheduler.test.ts b/packages/jobs/src/core/__tests__/Scheduler.test.ts new file mode 100644 index 000000000000..03b01dc92ae7 --- /dev/null +++ b/packages/jobs/src/core/__tests__/Scheduler.test.ts @@ -0,0 +1,247 @@ +import { describe, expect, vi, it, beforeEach } from 'vitest' + +import { + DEFAULT_PRIORITY, + DEFAULT_WAIT, + DEFAULT_WAIT_UNTIL, +} from '../../consts.js' +import * as errors from '../../errors.js' +import { Scheduler } from '../Scheduler.js' + +import { MockAdapter, mockLogger } from './mocks.js' + +vi.useFakeTimers() + +describe('constructor', () => { + const mockAdapter = new MockAdapter() + + it('saves adapter', () => { + const scheduler = new Scheduler({ + adapter: mockAdapter, + logger: mockLogger, + }) + + expect(scheduler.adapter).toEqual(mockAdapter) + }) + + it('saves logger', () => { + const scheduler = new Scheduler({ + adapter: mockAdapter, + logger: mockLogger, + }) + + expect(scheduler.logger).toEqual(mockLogger) + }) +}) + +describe('computeRunAt()', () => { + const mockAdapter = new MockAdapter() + + it('returns a Date `wait` seconds in the future if `wait` set', () => { + const scheduler = new Scheduler({ + adapter: mockAdapter, + logger: mockLogger, + }) + const wait = 10 + + expect( + scheduler.computeRunAt({ wait, waitUntil: DEFAULT_WAIT_UNTIL }), + ).toEqual(new Date(Date.now() + wait * 1000)) + }) + + it('returns the `waitUntil` Date, if set', () => { + const scheduler = new Scheduler({ + adapter: mockAdapter, + logger: mockLogger, + }) + const waitUntil = new Date(2030, 0, 1, 12, 34, 56) + + expect(scheduler.computeRunAt({ wait: DEFAULT_WAIT, waitUntil })).toEqual( + waitUntil, + ) + }) + + it('falls back to now', () => { + const scheduler = new Scheduler({ + adapter: mockAdapter, + logger: mockLogger, + }) + + expect( + scheduler.computeRunAt({ wait: 0, waitUntil: DEFAULT_WAIT_UNTIL }), + ).toEqual(new Date()) + expect( + scheduler.computeRunAt({ wait: DEFAULT_WAIT, waitUntil: null }), + ).toEqual(new Date()) + }) +}) + +describe('buildPayload()', () => { + const mockAdapter = new MockAdapter() + + it('returns a payload object', () => { + const scheduler = new Scheduler({ + adapter: mockAdapter, + logger: mockLogger, + }) + const job = { + id: 1, + name: 'JobName', + path: 'JobPath/JobPath', + queue: 'default', + priority: 25 as const, + + perform: vi.fn(), + } + const args = [{ foo: 'bar' }] + const payload = scheduler.buildPayload(job, args) + + expect(payload.name).toEqual(job.name) + expect(payload.path).toEqual(job.path) + expect(payload.args).toEqual(args) + expect(payload.runAt).toEqual(new Date()) + expect(payload.queue).toEqual(job.queue) + expect(payload.priority).toEqual(job.priority) + }) + + it('falls back to a default priority', () => { + const scheduler = new Scheduler({ + adapter: mockAdapter, + logger: mockLogger, + }) + const job = { + id: 1, + name: 'JobName', + path: 'JobPath/JobPath', + queue: 'default', + + perform: vi.fn(), + } + const payload = scheduler.buildPayload(job) + + expect(payload.priority).toEqual(DEFAULT_PRIORITY) + }) 
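+
+  // The next two specs, together with the computeRunAt() specs above, pin
+  // down the runAt math. A minimal sketch of the logic they imply, assuming
+  // the DEFAULT_WAIT/DEFAULT_WAIT_UNTIL consts are falsy (the 'falls back to
+  // now' spec treats them interchangeably with 0 and null). This is an
+  // illustration drawn from the assertions, not the actual Scheduler source:
+  //
+  //   computeRunAt({ wait, waitUntil }) {
+  //     if (waitUntil) {
+  //       return waitUntil
+  //     }
+  //     if (wait) {
+  //       return new Date(Date.now() + wait * 1000)
+  //     }
+  //     return new Date()
+  //   }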
+ + it('takes into account a `wait` time', () => { + const scheduler = new Scheduler({ + adapter: mockAdapter, + logger: mockLogger, + }) + const job = { + id: 1, + name: 'JobName', + path: 'JobPath/JobPath', + queue: 'default', + priority: 25 as const, + + perform: vi.fn(), + } + const options = { wait: 10 } + const payload = scheduler.buildPayload(job, [], options) + + expect(payload.runAt).toEqual(new Date(Date.now() + options.wait * 1000)) + }) + + it('takes into account a `waitUntil` date', () => { + const scheduler = new Scheduler({ + adapter: mockAdapter, + logger: mockLogger, + }) + const job = { + id: 1, + name: 'JobName', + path: 'JobPath/JobPath', + queue: 'default', + priority: 25 as const, + + perform: vi.fn(), + } + const options = { waitUntil: new Date(2030, 0, 1, 12, 34, 56) } + const payload = scheduler.buildPayload(job, [], options) + + expect(payload.runAt).toEqual(options.waitUntil) + }) + + it('throws an error if no queue set', async () => { + const scheduler = new Scheduler({ + adapter: mockAdapter, + logger: mockLogger, + }) + const job = { + id: 1, + name: 'JobName', + path: 'JobPath/JobPath', + priority: 25 as const, + + perform: vi.fn(), + } + + // @ts-expect-error testing error case + expect(() => scheduler.buildPayload(job)).toThrow( + errors.QueueNotDefinedError, + ) + }) +}) + +describe('schedule()', () => { + const mockAdapter = new MockAdapter() + + beforeEach(() => { + vi.resetAllMocks() + }) + + it('calls the schedule() method on the adapter', async () => { + const scheduler = new Scheduler({ + adapter: mockAdapter, + logger: mockLogger, + }) + const job = { + id: 1, + name: 'JobName', + path: 'JobPath/JobPath', + queue: 'default', + + perform: vi.fn(), + } + const args = [{ foo: 'bar' }] + const options = { + wait: 10, + } + + await scheduler.schedule({ job, jobArgs: args, jobOptions: options }) + + expect(mockAdapter.schedule).toHaveBeenCalledWith( + expect.objectContaining({ + name: job.name, + args: args, + }), + ) + }) + + it('re-throws any error that occurs during scheduling', async () => { + mockAdapter.schedule.mockImplementationOnce(() => { + throw new Error('Could not schedule') + }) + + const scheduler = new Scheduler({ + adapter: mockAdapter, + logger: mockLogger, + }) + const job = { + id: 1, + name: 'JobName', + path: 'JobPath/JobPath', + queue: 'default', + + perform: vi.fn(), + } + const args = [{ foo: 'bar' }] + const options = { + wait: 10, + } + + await expect( + scheduler.schedule({ job, jobArgs: args, jobOptions: options }), + ).rejects.toThrow(errors.SchedulingError) + }) +}) diff --git a/packages/jobs/src/core/__tests__/Worker.test.ts b/packages/jobs/src/core/__tests__/Worker.test.ts new file mode 100644 index 000000000000..bd05aed25c57 --- /dev/null +++ b/packages/jobs/src/core/__tests__/Worker.test.ts @@ -0,0 +1,409 @@ +import { beforeEach, describe, expect, vi, it } from 'vitest' + +import { DEFAULT_LOGGER } from '../../consts.js' +import * as errors from '../../errors.js' +import { Executor } from '../Executor.js' +import { Worker } from '../Worker.js' + +import { mockLogger, MockAdapter } from './mocks.js' + +// don't execute any code inside Executor, just spy on whether functions are +// called +vi.mock('../Executor') + +describe('constructor', () => { + it('saves options', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + } + const worker = new Worker(options) + + expect(worker.options.adapter).toEqual(options.adapter) + }) + + it('extracts 
adapter from options to variable', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + } + const worker = new Worker(options) + + expect(worker.adapter).toEqual(options.adapter) + }) + + it('extracts logger from options to variable', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + } + const worker = new Worker(options) + + expect(worker.logger).toEqual(mockLogger) + }) + + it('defaults logger if not provided', () => { + const options = { + adapter: new MockAdapter(), + queues: ['*'], + processName: 'mockProcessName', + } + const worker = new Worker(options) + + expect(worker.logger).toEqual(DEFAULT_LOGGER) + }) + + it('extracts processName from options to variable', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + } + const worker = new Worker(options) + + expect(worker.processName).toEqual('mockProcessName') + }) + + it('extracts queue from options to variable', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['default'], + processName: 'mockProcessName', + } + const worker = new Worker(options) + + expect(worker.queues).toEqual(['default']) + }) + + it('extracts clear from options to variable', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + clear: true, + } + const worker = new Worker(options) + + expect(worker.clear).toEqual(true) + }) + + it('defaults clear if not provided', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + } + const worker = new Worker(options) + + expect(worker.clear).toEqual(false) + }) + + it('extracts maxAttempts from options to variable', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + maxAttempts: 10, + } + const worker = new Worker(options) + + expect(worker.maxAttempts).toEqual(10) + }) + + it('extracts maxRuntime from options to variable', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + maxRuntime: 10, + } + const worker = new Worker(options) + + expect(worker.maxRuntime).toEqual(10) + }) + + it('extracts deleteFailedJobs from options to variable', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + deleteFailedJobs: true, + } + const worker = new Worker(options) + + expect(worker.deleteFailedJobs).toEqual(true) + }) + + it('extracts sleepDelay from options to variable', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + sleepDelay: 5, + } + const worker = new Worker(options) + + expect(worker.sleepDelay).toEqual(5000) + }) + + it('can set sleepDelay to 0', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + sleepDelay: 0, + } + const worker = new Worker(options) + + expect(worker.sleepDelay).toEqual(0) + }) + + it('sets forever', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + } + const worker = new Worker(options) + 
+ expect(worker.forever).toEqual(true) + }) + + it('extracts workoff from options to variable', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + queues: ['*'], + processName: 'mockProcessName', + workoff: true, + } + const worker = new Worker(options) + + expect(worker.workoff).toEqual(true) + }) + + it('defaults workoff if not provided', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + processName: 'mockProcessName', + queues: ['*'], + } + const worker = new Worker(options) + + expect(worker.workoff).toEqual(false) + }) + + it('sets lastCheckTime to the current time', () => { + const options = { + adapter: new MockAdapter(), + logger: mockLogger, + processName: 'mockProcessName', + queues: ['*'], + } + const worker = new Worker(options) + + expect(worker.lastCheckTime).toBeInstanceOf(Date) + }) + + it('throws an error if adapter not set', () => { + // @ts-expect-error testing error case + expect(() => new Worker()).toThrow(errors.AdapterRequiredError) + }) + + it('throws an error if queues not set', () => { + const options = { + adapter: new MockAdapter(), + } + // @ts-expect-error testing error case + expect(() => new Worker(options)).toThrow(errors.QueuesRequiredError) + }) + + it('throws an error if queues is an empty array', () => { + const options = { + adapter: new MockAdapter(), + queues: [], + } + // @ts-expect-error testing error case + expect(() => new Worker(options)).toThrow(errors.QueuesRequiredError) + }) +}) + +describe('run', async () => { + beforeEach(() => { + vi.resetAllMocks() + }) + + it('tries to find a job', async () => { + const adapter = new MockAdapter() + const worker = new Worker({ + adapter, + logger: mockLogger, + processName: 'mockProcessName', + queues: ['*'], + sleepDelay: 0, + forever: false, + }) + + await worker.run() + + expect(adapter.find).toHaveBeenCalledWith({ + processName: worker.processName, + maxRuntime: worker.maxRuntime, + queues: worker.queues, + }) + }) + + it('will try to find jobs in a loop until `forever` is set to `false`', async () => { + const adapter = new MockAdapter() + const worker = new Worker({ + adapter, + logger: mockLogger, + processName: 'mockProcessName', + queues: ['*'], + sleepDelay: 0.01, + forever: true, + }) + + worker.run() + // just enough delay to run through the loop twice + await new Promise((resolve) => setTimeout(resolve, 20)) + worker.forever = false + expect(adapter.find).toHaveBeenCalledTimes(2) + }) + + it('does nothing if no job found and forever=false', async () => { + const adapter = new MockAdapter() + + const worker = new Worker({ + adapter, + logger: mockLogger, + processName: 'mockProcessName', + queues: ['*'], + sleepDelay: 0, + forever: false, + }) + await worker.run() + + expect(Executor).not.toHaveBeenCalled() + }) + + it('exits if no job found and workoff=true', async () => { + const adapter = new MockAdapter() + + const worker = new Worker({ + adapter, + logger: mockLogger, + processName: 'mockProcessName', + queues: ['*'], + sleepDelay: 0, + workoff: true, + }) + await worker.run() + + expect(Executor).not.toHaveBeenCalled() + }) + + it('loops until no job found when workoff=true', async () => { + const adapter = new MockAdapter() + adapter.find + .mockImplementationOnce(() => ({ + id: 1, + name: 'mockJobName', + path: 'mockJobPath', + args: [], + attempts: 0, + })) + .mockImplementationOnce(() => undefined) + + const worker = new Worker({ + adapter, + logger: mockLogger, + processName: 'mockProcessName', + queues: ['*'], + 
sleepDelay: 0, + workoff: true, + }) + await worker.run() + + expect(Executor).toHaveBeenCalledOnce() + }) + + it('initializes an Executor instance if the job is found', async () => { + const adapter = new MockAdapter() + adapter.find.mockImplementationOnce(() => ({ + id: 1, + name: 'mockJobName', + path: 'mockJobPath', + args: [], + attempts: 0, + })) + const worker = new Worker({ + adapter, + logger: mockLogger, + processName: 'mockProcessName', + queues: ['*'], + sleepDelay: 0, + forever: false, + maxAttempts: 10, + deleteSuccessfulJobs: false, + deleteFailedJobs: true, + }) + + await worker.run() + + expect(Executor).toHaveBeenCalledWith({ + adapter, + job: { + id: 1, + name: 'mockJobName', + path: 'mockJobPath', + args: [], + attempts: 0, + }, + logger: worker.logger, + maxAttempts: 10, + deleteSuccessfulJobs: false, + deleteFailedJobs: true, + }) + }) + + it('calls `perform` on the Executor instance', async () => { + const adapter = new MockAdapter() + adapter.find.mockImplementationOnce(() => ({ + id: 1, + name: 'mockJobName', + path: 'mockJobPath', + args: [], + attempts: 0, + })) + const worker = new Worker({ + adapter, + logger: mockLogger, + processName: 'mockProcessName', + queues: ['*'], + sleepDelay: 0, + forever: false, + }) + + await worker.run() + + expect(Executor.prototype.perform).toHaveBeenCalled() + }) +}) diff --git a/packages/jobs/src/core/__tests__/mocks.ts b/packages/jobs/src/core/__tests__/mocks.ts new file mode 100644 index 000000000000..bcc915cd2a40 --- /dev/null +++ b/packages/jobs/src/core/__tests__/mocks.ts @@ -0,0 +1,33 @@ +import { vi } from 'vitest' + +import type { + ErrorOptions, + FailureOptions, + FindArgs, + SchedulePayload, + SuccessOptions, +} from '../../adapters/BaseAdapter/BaseAdapter.js' +import { BaseAdapter } from '../../adapters/BaseAdapter/BaseAdapter.js' +import type { BasicLogger, PossibleBaseJob } from '../../types.js' + +export const mockLogger: BasicLogger = { + info: vi.fn(() => {}), + debug: vi.fn(() => {}), + warn: vi.fn(() => {}), + error: vi.fn(() => {}), +} + +export class MockAdapter extends BaseAdapter { + constructor() { + super({ + logger: mockLogger, + }) + } + + schedule = vi.fn((_payload: SchedulePayload): void => {}) + find = vi.fn((_args: FindArgs): PossibleBaseJob => undefined) + success = vi.fn((_options: SuccessOptions): void => {}) + error = vi.fn((_options: ErrorOptions): void => {}) + failure = vi.fn((_options: FailureOptions): void => {}) + clear = vi.fn((): void => {}) +} diff --git a/packages/jobs/src/errors.ts b/packages/jobs/src/errors.ts new file mode 100644 index 000000000000..1914aa3461f9 --- /dev/null +++ b/packages/jobs/src/errors.ts @@ -0,0 +1,178 @@ +const JOBS_CONFIG_FILENAME = 'jobs.ts/js' + +/** + * Parent class for any RedwoodJob-related error + */ +export class RedwoodJobError extends Error { + constructor(message: string) { + super(message) + this.name = this.constructor.name + } +} + +/** + * Thrown when trying to configure a scheduler without an adapter + */ +export class AdapterNotConfiguredError extends RedwoodJobError { + constructor() { + super('No adapter configured for the job scheduler') + } +} + +/** + * Thrown when the Worker or Executor is instantiated without an adapter + */ +export class AdapterRequiredError extends RedwoodJobError { + constructor() { + super('`adapter` is required to perform a job') + } +} + +/** + * Thrown when the Worker is instantiated without an array of queues + */ +export class QueuesRequiredError extends RedwoodJobError { + constructor() { + super('`queues` 
is required to find a job to run')
+  }
+}
+
+/**
+ * Thrown when the Executor is instantiated without a job
+ */
+export class JobRequiredError extends RedwoodJobError {
+  constructor() {
+    super('`job` is required to perform a job')
+  }
+}
+
+/**
+ * Thrown when a job with the given handler is not found in the filesystem
+ */
+export class JobNotFoundError extends RedwoodJobError {
+  constructor(name: string) {
+    super(`Job \`${name}\` not found in the filesystem`)
+  }
+}
+
+/**
+ * Thrown when a job file exists, but the export does not match the filename
+ */
+export class JobExportNotFoundError extends RedwoodJobError {
+  constructor(name: string) {
+    super(`Job file \`${name}\` does not export a class with the same name`)
+  }
+}
+
+/**
+ * Thrown when the runner tries to import `adapter` from api/src/lib/jobs.js|ts and
+ * the file does not exist
+ */
+export class JobsLibNotFoundError extends RedwoodJobError {
+  constructor() {
+    super(
+      `api/src/lib/${JOBS_CONFIG_FILENAME} not found. Run \`yarn rw setup jobs\` to create this file and configure background jobs`,
+    )
+  }
+}
+
+/**
+ * Thrown when the runner tries to import `adapter` from api/src/lib/jobs.js|ts
+ */
+export class AdapterNotFoundError extends RedwoodJobError {
+  constructor(name: string) {
+    super(
+      `api/src/lib/${JOBS_CONFIG_FILENAME} does not export an adapter named \`${name}\``,
+    )
+  }
+}
+
+/**
+ * Thrown when the runner tries to import `logger` from api/src/lib/jobs.js|ts
+ */
+export class LoggerNotFoundError extends RedwoodJobError {
+  constructor(name: string) {
+    super(
+      `api/src/lib/${JOBS_CONFIG_FILENAME} does not export a logger named \`${name}\``,
+    )
+  }
+}
+
+/**
+ * Thrown when the runner tries to import `workerConfig` from api/src/lib/jobs.js|ts
+ */
+export class WorkerConfigNotFoundError extends RedwoodJobError {
+  constructor(name: string) {
+    super(`api/src/lib/${JOBS_CONFIG_FILENAME} does not export \`${name}\``)
+  }
+}
+
+/**
+ * Parent class for any job error where we want to wrap the underlying error
+ * in our own. Use it by extending this class and passing the original error to
+ * the constructor:
+ *
+ * ```typescript
+ * try {
+ *   throw new Error('Generic error')
+ * } catch (e) {
+ *   throw new RethrownJobError('Custom Error Message', e)
+ * }
+ * ```
+ */
+export class RethrownJobError extends RedwoodJobError {
+  originalError: Error
+  stackBeforeRethrow: string | undefined
+
+  constructor(message: string, error: Error) {
+    super(message)
+
+    if (!error) {
+      throw new Error(
+        'RethrownJobError requires a message and existing error object',
+      )
+    }
+
+    this.originalError = error
+    this.stackBeforeRethrow = this.stack
+
+    const messageLines = (this.message.match(/\n/g) || []).length + 1
+    this.stack =
+      this.stack
+        ?.split('\n')
+        .slice(0, messageLines + 1)
+        .join('\n') +
+      '\n' +
+      error.stack
+  }
+}
+
+/**
+ * Thrown when there is an error scheduling a job, wraps the underlying error
+ */
+export class SchedulingError extends RethrownJobError {
+  constructor(message: string, error: Error) {
+    super(message, error)
+  }
+}
+
+/**
+ * Thrown when there is an error performing a job, wraps the underlying error
+ */
+export class PerformError extends RethrownJobError {
+  constructor(message: string, error: Error) {
+    super(message, error)
+  }
+}
+
+export class QueueNotDefinedError extends RedwoodJobError {
+  constructor() {
+    super('Scheduler requires a named `queue` to place jobs in')
+  }
+}
+
+export class WorkerConfigIndexNotFoundError extends RedwoodJobError {
+  constructor(index: number) {
+    super(`Worker index ${index} not found in jobs config`)
+  }
+}
diff --git a/packages/jobs/src/index.ts b/packages/jobs/src/index.ts
new file mode 100644
index 000000000000..850957f3dcee
--- /dev/null
+++ b/packages/jobs/src/index.ts
@@ -0,0 +1,8 @@
+export * from './errors.js'
+
+export { JobManager } from './core/JobManager.js'
+export { Executor } from './core/Executor.js'
+export { Worker } from './core/Worker.js'
+
+export { BaseAdapter } from './adapters/BaseAdapter/BaseAdapter.js'
+export { PrismaAdapter } from './adapters/PrismaAdapter/PrismaAdapter.js'
diff --git a/packages/jobs/src/loaders.ts b/packages/jobs/src/loaders.ts
new file mode 100644
index 000000000000..303fb4e73f2e
--- /dev/null
+++ b/packages/jobs/src/loaders.ts
@@ -0,0 +1,62 @@
+import fs from 'node:fs'
+import path from 'node:path'
+
+import { getPaths } from '@redwoodjs/project-config'
+
+import type { JobManager } from './core/JobManager.js'
+import { JobsLibNotFoundError, JobNotFoundError } from './errors.js'
+import type {
+  Adapters,
+  BasicLogger,
+  Job,
+  JobComputedProperties,
+} from './types.js'
+import { makeFilePath } from './util.js'
+
+/**
+ * Loads the job manager from the user's project
+ *
+ * @returns JobManager
+ */
+export const loadJobsManager = async (): Promise<
+  JobManager<Adapters, string[], BasicLogger>
+> => {
+  // Confirm the specific lib/jobs.ts file exists
+  const jobsConfigPath = getPaths().api.distJobsConfig
+  if (!jobsConfigPath) {
+    throw new JobsLibNotFoundError()
+  }
+
+  // Import the jobs manager
+  const importPath = makeFilePath(jobsConfigPath)
+  const { jobs } = await import(importPath)
+  if (!jobs) {
+    throw new JobsLibNotFoundError()
+  }
+
+  return jobs
+}
+
+/**
+ * Load a specific job implementation from the user's project
+ */
+export const loadJob = async ({
+  name: jobName,
+  path: jobPath,
+}: JobComputedProperties): Promise<Job<string[], unknown[]>> => {
+  // Confirm the specific job file exists
+  const completeJobPath = path.join(getPaths().api.distJobs, jobPath) + '.js'
+
+  if (!fs.existsSync(completeJobPath)) {
+    throw new
JobNotFoundError(jobName)
+  }
+
+  const importPath = makeFilePath(completeJobPath)
+  const jobModule = await import(importPath)
+
+  if (!jobModule[jobName]) {
+    throw new JobNotFoundError(jobName)
+  }
+
+  return jobModule[jobName]
+}
diff --git a/packages/jobs/src/types.ts b/packages/jobs/src/types.ts
new file mode 100644
index 000000000000..9493a9d85ba1
--- /dev/null
+++ b/packages/jobs/src/types.ts
@@ -0,0 +1,300 @@
+// Defines the basic shape of a logger that RedwoodJob will invoke to print
+// debug messages. RedwoodJob will fall back to `console` if no
+// logger is passed in to RedwoodJob or any adapter. Luckily both
+
+import type { BaseAdapter } from './adapters/BaseAdapter/BaseAdapter.js'
+
+// Redwood's logger and the standard console logger conform to this shape.
+export interface BasicLogger {
+  debug: (message?: any, ...optionalParams: any[]) => void
+  info: (message?: any, ...optionalParams: any[]) => void
+  warn: (message?: any, ...optionalParams: any[]) => void
+  error: (message?: any, ...optionalParams: any[]) => void
+}
+
+// This is the minimum interface that a "job" must conform to in order to be
+// scheduled and executed by Redwood's job engine.
+export interface BaseJob {
+  id: string | number
+  name: string
+  path: string
+  args: unknown[]
+  attempts: number
+}
+export type PossibleBaseJob = BaseJob | undefined
+
+export type Adapters = Record<string, BaseAdapter>
+
+export interface WorkerConfig<
+  TAdapters extends Adapters,
+  TQueues extends string[],
+> {
+  /**
+   * The name of the adapter to use for this worker. This must be one of the keys
+   * in the `adapters` object when you created the `JobManager`.
+   */
+  adapter: keyof TAdapters
+
+  /**
+   * The queue or queues that this worker should work on. You can pass a single
+   * queue name, an array of queue names, or the string `'*'` to work on all
+   * queues.
+   */
+  queue: '*' | TQueues[number] | TQueues[number][]
+
+  /**
+   * The maximum number of retries to attempt for a job before giving up.
+   *
+   * @default 24
+   */
+  maxAttempts?: number
+
+  /**
+   * The maximum amount of time in seconds that a job can run before another
+   * worker will attempt to retry it.
+   *
+   * @default 14,400 (4 hours)
+   */
+  maxRuntime?: number
+
+  /**
+   * Whether a job that exceeds its `maxAttempts` should be deleted from the
+   * queue. If `false`, the job will remain in the queue but will not be
+   * processed further.
+   *
+   * @default false
+   */
+  deleteFailedJobs?: boolean
+
+  /**
+   * The amount of time in seconds to wait between polling the queue for new
+   * jobs. Some adapters may not need this if they do not poll the queue and
+   * instead rely on a subscription model.
+   *
+   * @default 5
+   */
+  sleepDelay?: number
+
+  /**
+   * The number of workers to spawn for this worker configuration.
+   *
+   * @default 1
+   */
+  count?: number
+
+  /**
+   * The logger to use for this worker. If not provided, the logger from the
+   * `JobManager` will be used.
+   */
+  logger?: BasicLogger
+}
+
+export interface JobManagerConfig<
+  //
+  TAdapters extends Adapters,
+  TQueues extends string[],
+  TLogger extends BasicLogger,
+  //
+> {
+  /**
+   * An object containing all of the adapters that this job manager will use.
+   * The keys should be the names of the adapters and the values should be the
+   * adapter instances.
+   */
+  adapters: TAdapters
+
+  /**
+   * The logger to use for this job manager. If not provided, the logger will
+   * default to the console.
+   */
+  logger: TLogger
+
+  /**
+   * An array of all of the queue names that jobs can be scheduled onto. Workers can
+   * be configured to work on a selection of these queues.
+   */
+  queues: TQueues
+
+  /**
+   * An array of worker configurations that define how jobs should be processed.
+   */
+  workers: WorkerConfig<TAdapters, TQueues>[]
+}
+
+export interface CreateSchedulerConfig<TAdapters extends Adapters> {
+  /**
+   * The name of the adapter to use for this scheduler. This must be one of the keys
+   * in the `adapters` object when you created the `JobManager`.
+   */
+  adapter: keyof TAdapters
+
+  /**
+   * The logger to use for this scheduler. If not provided, the logger from the
+   * `JobManager` will be used.
+   */
+  logger?: BasicLogger
+}
+
+export interface JobDefinition<
+  TQueues extends string[],
+  TArgs extends unknown[] = [],
+> {
+  /**
+   * The name of the queue that this job should always be scheduled on. This
+   * must be one of the values in the `queues` array when you created the
+   * `JobManager`.
+   */
+  queue: TQueues[number]
+
+  /**
+   * The priority of the job in the range of 0-100. The lower the number, the
+   * higher the priority. The default is 50.
+   * @default 50
+   */
+  priority?: PriorityValue
+
+  /**
+   * The function to run when this job is executed.
+   *
+   * @param args The arguments that were passed when the job was scheduled.
+   */
+  perform: (...args: TArgs) => Promise<void> | void
+}
+
+export type JobComputedProperties = {
+  /**
+   * The name of the job that was defined in the job file.
+   */
+  name: string
+
+  /**
+   * The path to the job file that this job was defined in.
+   */
+  path: string
+}
+
+export type Job<
+  TQueues extends string[],
+  TArgs extends unknown[] = [],
+> = JobDefinition<TQueues, TArgs> & JobComputedProperties
+
+export type ScheduleJobOptions =
+  | {
+      /**
+       * The number of seconds to wait before running this job. This is mutually
+       * exclusive with `waitUntil`.
+       */
+      wait: number
+      waitUntil?: never
+    }
+  | {
+      wait?: never
+      /**
+       * The date and time at which to run this job. This is mutually exclusive with
+       * `wait`.
+ */ + waitUntil: Date + } + +type PriorityValue = + | 0 + | 1 + | 2 + | 3 + | 4 + | 5 + | 6 + | 7 + | 8 + | 9 + | 10 + | 11 + | 12 + | 13 + | 14 + | 15 + | 16 + | 17 + | 18 + | 19 + | 20 + | 21 + | 22 + | 23 + | 24 + | 25 + | 26 + | 27 + | 28 + | 29 + | 30 + | 31 + | 32 + | 33 + | 34 + | 35 + | 36 + | 37 + | 38 + | 39 + | 40 + | 41 + | 42 + | 43 + | 44 + | 45 + | 46 + | 47 + | 48 + | 49 + | 50 + | 51 + | 52 + | 53 + | 54 + | 55 + | 56 + | 57 + | 58 + | 59 + | 60 + | 61 + | 62 + | 63 + | 64 + | 65 + | 66 + | 67 + | 68 + | 69 + | 70 + | 71 + | 72 + | 73 + | 74 + | 75 + | 76 + | 77 + | 78 + | 79 + | 80 + | 81 + | 82 + | 83 + | 84 + | 85 + | 86 + | 87 + | 88 + | 89 + | 90 + | 91 + | 92 + | 93 + | 94 + | 95 + | 96 + | 97 + | 98 + | 99 + | 100 diff --git a/packages/jobs/src/util.ts b/packages/jobs/src/util.ts new file mode 100644 index 000000000000..e8fb0fba2f9b --- /dev/null +++ b/packages/jobs/src/util.ts @@ -0,0 +1,6 @@ +import { pathToFileURL } from 'node:url' + +// TODO(jgmw): Refactor and move this into `@redwoodjs/project-config` or similar +export function makeFilePath(path: string) { + return pathToFileURL(path).href +} diff --git a/packages/jobs/tsconfig.build.json b/packages/jobs/tsconfig.build.json new file mode 100644 index 000000000000..b8547d222235 --- /dev/null +++ b/packages/jobs/tsconfig.build.json @@ -0,0 +1,12 @@ +{ + "extends": "../../tsconfig.compilerOption.json", + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "moduleResolution": "Node16", + "module": "Node16", + "tsBuildInfoFile": "./tsconfig.build.tsbuildinfo" + }, + "include": ["src"], + "references": [{ "path": "../cli-helpers" }, { "path": "../project-config" }] +} diff --git a/packages/jobs/tsconfig.cjs.json b/packages/jobs/tsconfig.cjs.json new file mode 100644 index 000000000000..6c80953a45d7 --- /dev/null +++ b/packages/jobs/tsconfig.cjs.json @@ -0,0 +1,15 @@ +{ + "extends": "./tsconfig.build.json", + "compilerOptions": { + "outDir": "dist/cjs", + "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo" + }, + "exclude": [ + "dist", + "node_modules", + "**/__tests__", + "**/__mocks__", + "**/*.test.*", + "./src/bins/**/*" + ] +} diff --git a/packages/jobs/tsconfig.json b/packages/jobs/tsconfig.json new file mode 100644 index 000000000000..799799136e51 --- /dev/null +++ b/packages/jobs/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../tsconfig.compilerOption.json", + "compilerOptions": { + "moduleResolution": "Node16", + "module": "Node16" + }, + "include": ["."], + "exclude": ["dist", "node_modules", "**/__mocks__", "**/__tests__/fixtures"], + "references": [{ "path": "../cli-helpers" }, { "path": "../project-config" }] +} diff --git a/packages/jobs/vitest.config.ts b/packages/jobs/vitest.config.ts new file mode 100644 index 000000000000..68ca131fd8cb --- /dev/null +++ b/packages/jobs/vitest.config.ts @@ -0,0 +1,9 @@ +import { defineConfig, configDefaults } from 'vitest/config' + +export default defineConfig({ + test: { + testTimeout: 15_000, + exclude: [...configDefaults.exclude, '**/fixtures', '**/dist'], + logHeapUsage: true, + }, +}) diff --git a/packages/prerender/.babelrc.js b/packages/prerender/.babelrc.js deleted file mode 100644 index 3b2c815712d9..000000000000 --- a/packages/prerender/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../babel.config.js' } diff --git a/packages/prerender/browserUtils/index.js b/packages/prerender/browserUtils/index.js deleted file mode 100644 index ab5b0031e224..000000000000 --- a/packages/prerender/browserUtils/index.js +++ /dev/null 
@@ -1,2 +0,0 @@ -/* eslint-env es6, commonjs */ -module.exports = require('../dist/browserUtils') diff --git a/packages/prerender/browserUtils/package.json b/packages/prerender/browserUtils/package.json deleted file mode 100644 index 9ad9aacba267..000000000000 --- a/packages/prerender/browserUtils/package.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "main": "./index.js", - "types": "../dist/browserUtils/index.d.ts" -} diff --git a/packages/prerender/build.mts b/packages/prerender/build.mts new file mode 100644 index 000000000000..16175a6725c0 --- /dev/null +++ b/packages/prerender/build.mts @@ -0,0 +1,3 @@ +import { build } from '@redwoodjs/framework-tools' + +await build() diff --git a/packages/prerender/detection/index.js b/packages/prerender/detection/index.js deleted file mode 100644 index ba249a1a85b6..000000000000 --- a/packages/prerender/detection/index.js +++ /dev/null @@ -1,2 +0,0 @@ -/* eslint-env es6, commonjs */ -module.exports = require('../dist/detection') diff --git a/packages/prerender/detection/package.json b/packages/prerender/detection/package.json deleted file mode 100644 index 5e55a1e86633..000000000000 --- a/packages/prerender/detection/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "main": "./index.js" -} diff --git a/packages/prerender/package.json b/packages/prerender/package.json index fd798f21f1d9..6790b9328e2c 100644 --- a/packages/prerender/package.json +++ b/packages/prerender/package.json @@ -8,24 +8,39 @@ "directory": "packages/prerender" }, "license": "MIT", + "type": "commonjs", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + }, + "./browserUtils": { + "types": "./dist/browserUtils/index.d.ts", + "default": "./dist/browserUtils/index.js" + }, + "./detection": { + "types": "./dist/detection/index.d.ts", + "default": "./dist/detection/index.js" + } + }, "main": "dist/index.js", + "types": "./dist/index.d.ts", "files": [ - "dist", - "browserUtils", - "detection" + "dist" ], "scripts": { - "build": "yarn build:js && yarn build:types", - "build:js": "babel src -d dist --extensions \".js,.jsx,.ts,.tsx,.jsx\"", + "build": "tsx ./build.mts && yarn build:types", "build:pack": "yarn pack -o redwoodjs-prerender.tgz", - "build:types": "tsc --build --verbose", + "build:types": "tsc --build --verbose ./tsconfig.build.json", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx,template\" --ignore dist --exec \"yarn build\"", + "check:attw": "yarn rw-fwtools-attw", + "check:package": "concurrently npm:check:attw yarn:publint", "prepublishOnly": "yarn build", "test": "vitest run", "test:watch": "vitest watch" }, "dependencies": { - "@babel/runtime-corejs3": "7.25.0", + "@redwoodjs/babel-config": "workspace:*", "@redwoodjs/project-config": "workspace:*", "@redwoodjs/router": "workspace:*", "@redwoodjs/structure": "workspace:*", @@ -33,15 +48,16 @@ "@whatwg-node/fetch": "0.9.20", "babel-plugin-ignore-html-and-css-imports": "0.1.0", "cheerio": "1.0.0", - "core-js": "3.38.0", "graphql": "16.9.0", "mime-types": "2.1.35" }, "devDependencies": { - "@babel/cli": "7.24.8", - "@babel/core": "^7.22.20", + "@redwoodjs/framework-tools": "workspace:*", "@types/mime-types": "2.1.4", "babel-plugin-tester": "11.0.4", + "concurrently": "8.2.2", + "publint": "0.2.10", + "tsx": "4.17.0", "typescript": "5.5.4", "vitest": "2.0.5" }, diff --git a/packages/prerender/tsconfig.build.json b/packages/prerender/tsconfig.build.json new file mode 100644 index 000000000000..89552aaef821 --- /dev/null +++ b/packages/prerender/tsconfig.build.json @@ -0,0 +1,17 @@ 
+{ + "extends": "../../tsconfig.compilerOption.json", + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "allowJs": true + }, + "include": ["./src/**/*", "ambient.d.ts"], + "references": [ + { "path": "../babel-config" }, + { "path": "../project-config" }, + { "path": "../router/tsconfig.build.json" }, + { "path": "../structure" }, + { "path": "../web/tsconfig.build.json" }, + { "path": "../framework-tools" } + ] +} diff --git a/packages/prerender/tsconfig.json b/packages/prerender/tsconfig.json index 78d1d766d16e..8bf851c890fc 100644 --- a/packages/prerender/tsconfig.json +++ b/packages/prerender/tsconfig.json @@ -1,17 +1,18 @@ { "extends": "../../tsconfig.compilerOption.json", "compilerOptions": { - "rootDir": "src", - "outDir": "dist", + "module": "Node16", + "moduleResolution": "Node16", "allowJs": true }, - "include": ["./src/**/*", "ambient.d.ts"], + "include": ["."], "references": [ - { "path": "../web/tsconfig.build.json" }, - { "path": "../router/tsconfig.build.json" }, - { "path": "../internal" }, + { "path": "../babel-config" }, { "path": "../project-config" }, - { "path": "../auth/tsconfig.build.json" }, - { "path": "../structure" } - ] + { "path": "../router/tsconfig.build.json" }, + { "path": "../structure" }, + { "path": "../web/tsconfig.build.json" }, + { "path": "../framework-tools" } + ], + "exclude": ["dist", "node_modules", "**/__mocks__", "**/__tests__/fixtures"] } diff --git a/packages/project-config/package.json b/packages/project-config/package.json index 812803e37ab8..fd6c68152a03 100644 --- a/packages/project-config/package.json +++ b/packages/project-config/package.json @@ -27,7 +27,7 @@ "scripts": { "build": "tsx ./build.ts && run build:types", "build:pack": "yarn pack -o redwoodjs-project-config.tgz", - "build:types": "tsc --build --verbose ./tsconfig.json ./tsconfig.types-cjs.json", + "build:types": "tsc --build --verbose ./tsconfig.json ./tsconfig.cjs.json", "build:watch": "nodemon --watch src --ext \"js,ts,tsx\" --ignore dist --exec \"yarn build\"", "check:attw": "yarn attw -P", "check:package": "concurrently npm:check:attw yarn:publint", diff --git a/packages/project-config/src/__tests__/paths.test.ts b/packages/project-config/src/__tests__/paths.test.ts index 6f85bf54a312..7446c131d90d 100644 --- a/packages/project-config/src/__tests__/paths.test.ts +++ b/packages/project-config/src/__tests__/paths.test.ts @@ -94,6 +94,11 @@ describe('paths', () => { types: path.join(FIXTURE_BASEDIR, 'api', 'types'), models: path.join(FIXTURE_BASEDIR, 'api', 'src', 'models'), mail: path.join(FIXTURE_BASEDIR, 'api', 'src', 'mail'), + jobs: path.join(FIXTURE_BASEDIR, 'api', 'src', 'jobs'), + jobsConfig: null, + distJobs: path.join(FIXTURE_BASEDIR, 'api', 'dist', 'jobs'), + distJobsConfig: null, + logger: path.join(FIXTURE_BASEDIR, 'api', 'src', 'lib', 'logger.ts'), }, web: { routes: path.join(FIXTURE_BASEDIR, 'web', 'src', 'Routes.tsx'), @@ -363,6 +368,11 @@ describe('paths', () => { types: path.join(FIXTURE_BASEDIR, 'api', 'types'), models: path.join(FIXTURE_BASEDIR, 'api', 'src', 'models'), mail: path.join(FIXTURE_BASEDIR, 'api', 'src', 'mail'), + jobs: path.join(FIXTURE_BASEDIR, 'api', 'src', 'jobs'), + jobsConfig: null, + distJobs: path.join(FIXTURE_BASEDIR, 'api', 'dist', 'jobs'), + distJobsConfig: null, + logger: null, }, web: { routes: path.join(FIXTURE_BASEDIR, 'web', 'src', 'Routes.js'), @@ -678,6 +688,11 @@ describe('paths', () => { types: path.join(FIXTURE_BASEDIR, 'api', 'types'), models: path.join(FIXTURE_BASEDIR, 'api', 'src', 'models'), mail: 
path.join(FIXTURE_BASEDIR, 'api', 'src', 'mail'),
+        jobs: path.join(FIXTURE_BASEDIR, 'api', 'src', 'jobs'),
+        jobsConfig: null,
+        distJobs: path.join(FIXTURE_BASEDIR, 'api', 'dist', 'jobs'),
+        distJobsConfig: null,
+        logger: null,
       },
       web: {
         routes: path.join(FIXTURE_BASEDIR, 'web', 'src', 'Routes.js'),
@@ -952,6 +967,11 @@
         types: path.join(FIXTURE_BASEDIR, 'api', 'types'),
         models: path.join(FIXTURE_BASEDIR, 'api', 'src', 'models'),
         mail: path.join(FIXTURE_BASEDIR, 'api', 'src', 'mail'),
+        jobs: path.join(FIXTURE_BASEDIR, 'api', 'src', 'jobs'),
+        jobsConfig: null,
+        distJobs: path.join(FIXTURE_BASEDIR, 'api', 'dist', 'jobs'),
+        distJobsConfig: null,
+        logger: path.join(FIXTURE_BASEDIR, 'api', 'src', 'lib', 'logger.ts'),
       },
       web: {
         routes: path.join(FIXTURE_BASEDIR, 'web', 'src', 'Routes.tsx'),
diff --git a/packages/project-config/src/paths.ts b/packages/project-config/src/paths.ts
index 56681b754c63..3fe2b8b89b1d 100644
--- a/packages/project-config/src/paths.ts
+++ b/packages/project-config/src/paths.ts
@@ -23,6 +23,11 @@ export interface NodeTargetPaths {
   types: string
   models: string
   mail: string
+  jobs: string
+  distJobs: string
+  jobsConfig: string | null
+  distJobsConfig: string | null
+  logger: string | null
 }
 
 export interface WebPaths {
@@ -93,12 +98,14 @@ const PATH_RW_SCRIPTS = 'scripts'
 const PATH_API_DIR_GRAPHQL = 'api/src/graphql'
 const PATH_API_DIR_CONFIG = 'api/src/config'
 const PATH_API_DIR_MODELS = 'api/src/models'
+const PATH_API_DIR_JOBS = 'api/src/jobs'
 const PATH_API_DIR_LIB = 'api/src/lib'
 const PATH_API_DIR_GENERATORS = 'api/generators'
 const PATH_API_DIR_SERVICES = 'api/src/services'
 const PATH_API_DIR_DIRECTIVES = 'api/src/directives'
 const PATH_API_DIR_SUBSCRIPTIONS = 'api/src/subscriptions'
 const PATH_API_DIR_SRC = 'api/src'
+const PATH_API_DIR_DIST = 'api/dist'
 const PATH_WEB_ROUTES = 'web/src/Routes' // .jsx|.tsx
 const PATH_WEB_DIR_LAYOUTS = 'web/src/layouts/'
 const PATH_WEB_DIR_PAGES = 'web/src/pages/'
@@ -205,10 +212,17 @@ export const getPaths = (BASE_DIR: string = getBaseDir()): Paths => {
       directives: path.join(BASE_DIR, PATH_API_DIR_DIRECTIVES),
       subscriptions: path.join(BASE_DIR, PATH_API_DIR_SUBSCRIPTIONS),
       src: path.join(BASE_DIR, PATH_API_DIR_SRC),
-      dist: path.join(BASE_DIR, 'api/dist'),
+      dist: path.join(BASE_DIR, PATH_API_DIR_DIST),
       types: path.join(BASE_DIR, 'api/types'),
       models: path.join(BASE_DIR, PATH_API_DIR_MODELS),
       mail: path.join(BASE_DIR, PATH_API_DIR_SRC, 'mail'),
+      jobs: path.join(BASE_DIR, PATH_API_DIR_JOBS),
+      distJobs: path.join(BASE_DIR, PATH_API_DIR_DIST, 'jobs'),
+      jobsConfig: resolveFile(path.join(BASE_DIR, PATH_API_DIR_LIB, 'jobs')),
+      distJobsConfig: resolveFile(
+        path.join(BASE_DIR, PATH_API_DIR_DIST, 'lib', 'jobs'),
+      ),
+      logger: resolveFile(path.join(BASE_DIR, PATH_API_DIR_LIB, 'logger')),
     },
     web: {
diff --git a/packages/project-config/tsconfig.types-cjs.json b/packages/project-config/tsconfig.cjs.json
similarity index 71%
rename from packages/project-config/tsconfig.types-cjs.json
rename to packages/project-config/tsconfig.cjs.json
index 79c4cd7b71c0..fb840bc95d1c 100644
--- a/packages/project-config/tsconfig.types-cjs.json
+++ b/packages/project-config/tsconfig.cjs.json
@@ -2,7 +2,7 @@
   "extends": "./tsconfig.json",
   "compilerOptions": {
     "outDir": "dist/cjs",
-    "tsBuildInfoFile": "tsconfig.types-cjs.tsbuildinfo",
+    "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo",
     "module": "commonjs",
     "moduleResolution": "node"
   }
diff --git a/packages/record/.babelrc.js
b/packages/record/.babelrc.js deleted file mode 100644 index 3b2c815712d9..000000000000 --- a/packages/record/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../babel.config.js' } diff --git a/packages/record/build.mts b/packages/record/build.mts new file mode 100644 index 000000000000..96902de6946e --- /dev/null +++ b/packages/record/build.mts @@ -0,0 +1,21 @@ +import { build, defaultBuildOptions } from '@redwoodjs/framework-tools' +import { insertCommonJsPackageJson } from '@redwoodjs/framework-tools/generateTypes' + +// ESM build +await build({ + buildOptions: { + ...defaultBuildOptions, + format: 'esm', + }, +}) + +// CJS build +await build({ + buildOptions: { + ...defaultBuildOptions, + outdir: 'dist/cjs', + }, +}) +await insertCommonJsPackageJson({ + buildFileUrl: import.meta.url, +}) diff --git a/packages/record/package.json b/packages/record/package.json index 7071794620ed..ec0a42811120 100644 --- a/packages/record/package.json +++ b/packages/record/package.json @@ -7,32 +7,39 @@ "directory": "packages/record" }, "license": "MIT", - "main": "./dist/index.js", - "types": "./dist/index.d.ts", + "type": "module", + "exports": { + ".": { + "import": "./dist/index.js", + "default": "./dist/cjs/index.js" + } + }, + "main": "./dist/cjs/index.js", + "module": "./dist/index.js", "files": [ "dist" ], "scripts": { - "build": "yarn build:js", - "build:js": "babel src -d dist --extensions \".js,.jsx,.ts,.tsx\"", + "build": "tsx ./build.mts", "build:pack": "yarn pack -o redwoodjs-record.tgz", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", - "datamodel:parse": "node src/scripts/parse.js", + "check:package": "yarn publint", "prepublishOnly": "NODE_ENV=production yarn build", "test": "vitest run", "test:watch": "vitest watch" }, "dependencies": { - "@babel/runtime-corejs3": "7.25.0", "@prisma/client": "5.18.0", + "@redwoodjs/api": "workspace:*", "@redwoodjs/project-config": "workspace:*", - "core-js": "3.38.0" + "camelcase": "6.3.0" }, "devDependencies": { - "@babel/cli": "7.24.8", - "@babel/core": "^7.22.20", "@prisma/internals": "5.18.0", + "@redwoodjs/framework-tools": "workspace:*", "esbuild": "0.23.0", + "publint": "0.2.10", + "tsx": "4.17.0", "vitest": "2.0.5" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" diff --git a/packages/router/build.ts b/packages/router/build.ts index f07a4b051713..4f9ebbf82169 100644 --- a/packages/router/build.ts +++ b/packages/router/build.ts @@ -1,32 +1,14 @@ -import { build, defaultBuildOptions } from '@redwoodjs/framework-tools' +import { buildExternalCjs, buildExternalEsm } from '@redwoodjs/framework-tools' import { generateTypesCjs, generateTypesEsm, insertCommonJsPackageJson, } from '@redwoodjs/framework-tools/generateTypes' -// ESM build -await build({ - buildOptions: { - ...defaultBuildOptions, - tsconfig: 'tsconfig.build.json', - format: 'esm', - packages: 'external', - }, -}) +await buildExternalEsm() await generateTypesEsm() -// CJS build -await build({ - buildOptions: { - ...defaultBuildOptions, - tsconfig: 'tsconfig.cjs.json', - outdir: 'dist/cjs', - packages: 'external', - }, -}) +await buildExternalCjs() await generateTypesCjs() -await insertCommonJsPackageJson({ - buildFileUrl: import.meta.url, - cjsDir: 'dist/cjs', -}) + +await insertCommonJsPackageJson({ buildFileUrl: import.meta.url }) diff --git a/packages/web/package.json b/packages/web/package.json index 43f928850759..84f0f4965aa3 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ 
-131,7 +131,7 @@ "scripts": { "build": "tsx ./build.ts && yarn build:types", "build:pack": "yarn pack -o redwoodjs-web.tgz", - "build:types": "tsc --build --verbose ./tsconfig.build.json ./tsconfig.types-cjs.json", + "build:types": "tsc --build --verbose ./tsconfig.build.json ./tsconfig.cjs.json", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "check:attw": "tsx ./attw.ts", "check:package": "concurrently npm:check:attw yarn:publint", diff --git a/packages/web/tsconfig.cjs.json b/packages/web/tsconfig.cjs.json new file mode 100644 index 000000000000..a660cecf11ff --- /dev/null +++ b/packages/web/tsconfig.cjs.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.build.json", + "compilerOptions": { + "outDir": "dist/cjs", + "tsBuildInfoFile": "./tsconfig.cjs.tsbuildinfo" + } +} diff --git a/tasks/clean.mjs b/tasks/clean.mjs index c19bfb9250fc..0fae2f416520 100644 --- a/tasks/clean.mjs +++ b/tasks/clean.mjs @@ -21,6 +21,6 @@ await rimraf('packages/**/tsconfig.build.tsbuildinfo', { glob: true, }) -await rimraf('packages/**/tsconfig.types-cjs.tsbuildinfo', { +await rimraf('packages/**/tsconfig.cjs.tsbuildinfo', { glob: true, }) diff --git a/tasks/framework-tools/frameworkSyncToProject.mjs b/tasks/framework-tools/frameworkSyncToProject.mjs index 65ea4978ddc1..bb39e11fc9b8 100644 --- a/tasks/framework-tools/frameworkSyncToProject.mjs +++ b/tasks/framework-tools/frameworkSyncToProject.mjs @@ -51,7 +51,7 @@ const ignored = [ /tsconfig.tsbuildinfo/, /tsconfig.build.tsbuildinfo/, - /tsconfig.types-cjs.tsbuildinfo/, + /tsconfig.cjs.tsbuildinfo/, (filePath) => IGNORE_EXTENSIONS.some((ext) => filePath.endsWith(ext)), ] @@ -263,10 +263,7 @@ async function main() { path.join(path.dirname(packageJsonPath), 'tsconfig.build.tsbuildinfo'), ) await rimraf( - path.join( - path.dirname(packageJsonPath), - 'tsconfig.types-cjs.tsbuildinfo', - ), + path.join(path.dirname(packageJsonPath), 'tsconfig.cjs.tsbuildinfo'), ) logStatus(`Building ${c.magenta(packageName)}...`) diff --git a/tasks/smoke-tests/rsc-kitchen-sink/tests/rsc-kitchen-sink.spec.ts b/tasks/smoke-tests/rsc-kitchen-sink/tests/rsc-kitchen-sink.spec.ts index 03dac4dfa2c6..6098a36f304a 100644 --- a/tasks/smoke-tests/rsc-kitchen-sink/tests/rsc-kitchen-sink.spec.ts +++ b/tasks/smoke-tests/rsc-kitchen-sink/tests/rsc-kitchen-sink.spec.ts @@ -16,6 +16,8 @@ test.beforeAll(async ({ browser }) => { await page.getByLabel('Username').fill(testUser.email) await page.getByLabel('Password').fill(testUser.password) + await page.waitForTimeout(300) + await page.getByRole('button', { name: 'Sign Up' }).click() // Wait for either... 
@@ -178,9 +180,13 @@ test('Server Cell', async ({ page }) => { expect(h1).toMatch(/UserExamples - userExamples/) await expect(page.getByText('Email')).toBeVisible() - await expect(page.getByText('jackie@example.com')).toBeVisible() + await expect( + page.getByText(/(jackie|bob|alice|mark)@example\.com/), + ).toBeVisible() - await expect(page.locator('tr').nth(2)).toContainText(/Name\s*jackie/) + await expect(page.locator('tr').nth(2)).toContainText( + /Name\s*(jackie|bob|alice|mark)/, + ) }) test('Server Cell - Error component', async ({ page }) => { diff --git a/tasks/smoke-tests/shared/common.ts b/tasks/smoke-tests/shared/common.ts index 4631a2395e21..7082a6b81f82 100644 --- a/tasks/smoke-tests/shared/common.ts +++ b/tasks/smoke-tests/shared/common.ts @@ -95,6 +95,8 @@ export const loginAsTestUser = async ({ await page.getByLabel('Username').fill(email) await page.getByLabel('Password').fill(password) + await page.waitForTimeout(300) + await page.getByRole('button', { name: 'Login' }).click() await page.waitForURL(redirectUrl) diff --git a/tasks/test-project/codemods/scenarioValueSuffix.js b/tasks/test-project/codemods/scenarioValueSuffix.js index 4d86edae1088..7a6441760819 100644 --- a/tasks/test-project/codemods/scenarioValueSuffix.js +++ b/tasks/test-project/codemods/scenarioValueSuffix.js @@ -1,16 +1,15 @@ +const stringWithSuffixRegex = /String\d+$/ + export default (file, api) => { const j = api.jscodeshift const root = j(file.source) - const endsWith6DigitsRE = /String.*\d{6,}$/ - - // Replaces the randomly generated value with consistent ones - + // Replaces the randomly generated value with a consistent one return root .find(j.Literal, { type: 'StringLiteral' }) .forEach((obj) => { const stringValue = obj.value.value - if (endsWith6DigitsRE.test(stringValue)) { + if (stringWithSuffixRegex.test(stringValue)) { obj.value.value = `String${obj.value.loc.start.line}` } }) diff --git a/yarn.lock b/yarn.lock index e8e94d3dddb3..1ad808bf70da 100644 --- a/yarn.lock +++ b/yarn.lock @@ -7362,13 +7362,13 @@ __metadata: resolution: "@redwoodjs/auth-auth0-web@workspace:packages/auth-providers/auth0/web" dependencies: "@auth0/auth0-spa-js": "npm:2.1.3" - "@babel/cli": "npm:7.24.8" - "@babel/core": "npm:^7.22.20" - "@babel/runtime-corejs3": "npm:7.25.0" "@redwoodjs/auth": "workspace:*" + "@redwoodjs/framework-tools": "workspace:*" "@types/react": "npm:^18.2.55" - core-js: "npm:3.38.0" + concurrently: "npm:8.2.2" + publint: "npm:0.2.10" react: "npm:19.0.0-rc-8269d55d-20240802" + tsx: "npm:4.17.0" typescript: "npm:5.5.4" vitest: "npm:2.0.5" peerDependencies: @@ -7416,14 +7416,14 @@ __metadata: resolution: "@redwoodjs/auth-azure-active-directory-web@workspace:packages/auth-providers/azureActiveDirectory/web" dependencies: "@azure/msal-browser": "npm:2.39.0" - "@babel/cli": "npm:7.24.8" - "@babel/core": "npm:^7.22.20" - "@babel/runtime-corejs3": "npm:7.25.0" "@redwoodjs/auth": "workspace:*" + "@redwoodjs/framework-tools": "workspace:*" "@types/netlify-identity-widget": "npm:1.9.6" "@types/react": "npm:^18.2.55" - core-js: "npm:3.38.0" + concurrently: "npm:8.2.2" + publint: "npm:0.2.10" react: "npm:19.0.0-rc-8269d55d-20240802" + tsx: "npm:4.17.0" typescript: "npm:5.5.4" vitest: "npm:2.0.5" peerDependencies: @@ -7467,15 +7467,15 @@ __metadata: version: 0.0.0-use.local resolution: "@redwoodjs/auth-clerk-web@workspace:packages/auth-providers/clerk/web" dependencies: - "@babel/cli": "npm:7.24.8" - "@babel/core": "npm:^7.22.20" - "@babel/runtime-corejs3": "npm:7.25.0" "@clerk/clerk-react": "npm:4.32.3" 
"@clerk/types": "npm:3.65.3" "@redwoodjs/auth": "workspace:*" + "@redwoodjs/framework-tools": "workspace:*" "@types/react": "npm:^18.2.55" - core-js: "npm:3.38.0" + concurrently: "npm:8.2.2" + publint: "npm:0.2.10" react: "npm:19.0.0-rc-8269d55d-20240802" + tsx: "npm:4.17.0" typescript: "npm:5.5.4" vitest: "npm:2.0.5" peerDependencies: @@ -7672,14 +7672,14 @@ __metadata: version: 0.0.0-use.local resolution: "@redwoodjs/auth-netlify-web@workspace:packages/auth-providers/netlify/web" dependencies: - "@babel/cli": "npm:7.24.8" - "@babel/core": "npm:^7.22.20" - "@babel/runtime-corejs3": "npm:7.25.0" "@redwoodjs/auth": "workspace:*" + "@redwoodjs/framework-tools": "workspace:*" "@types/netlify-identity-widget": "npm:1.9.6" "@types/react": "npm:^18.2.55" - core-js: "npm:3.38.0" + concurrently: "npm:8.2.2" + publint: "npm:0.2.10" react: "npm:19.0.0-rc-8269d55d-20240802" + tsx: "npm:4.17.0" typescript: "npm:5.5.4" vitest: "npm:2.0.5" peerDependencies: @@ -7747,15 +7747,15 @@ __metadata: version: 0.0.0-use.local resolution: "@redwoodjs/auth-supabase-web@workspace:packages/auth-providers/supabase/web" dependencies: - "@babel/cli": "npm:7.24.8" - "@babel/core": "npm:^7.22.20" - "@babel/runtime-corejs3": "npm:7.25.0" "@redwoodjs/auth": "workspace:*" + "@redwoodjs/framework-tools": "workspace:*" "@supabase/ssr": "npm:0.4.0" "@supabase/supabase-js": "npm:2.45.1" "@types/react": "npm:^18.2.55" - core-js: "npm:3.38.0" + concurrently: "npm:8.2.2" + publint: "npm:0.2.10" react: "npm:19.0.0-rc-8269d55d-20240802" + tsx: "npm:4.17.0" typescript: "npm:5.5.4" vitest: "npm:2.0.5" peerDependencies: @@ -7804,14 +7804,14 @@ __metadata: version: 0.0.0-use.local resolution: "@redwoodjs/auth-supertokens-web@workspace:packages/auth-providers/supertokens/web" dependencies: - "@babel/cli": "npm:7.24.8" - "@babel/core": "npm:^7.22.20" - "@babel/runtime-corejs3": "npm:7.25.0" "@redwoodjs/auth": "workspace:*" + "@redwoodjs/framework-tools": "workspace:*" "@types/react": "npm:^18.2.55" - core-js: "npm:3.38.0" + concurrently: "npm:8.2.2" + publint: "npm:0.2.10" react: "npm:19.0.0-rc-8269d55d-20240802" supertokens-auth-react: "npm:0.39.1" + tsx: "npm:4.17.0" typescript: "npm:5.5.4" vitest: "npm:2.0.5" peerDependencies: @@ -8201,9 +8201,7 @@ __metadata: version: 0.0.0-use.local resolution: "@redwoodjs/forms@workspace:packages/forms" dependencies: - "@babel/cli": "npm:7.24.8" - "@babel/core": "npm:^7.22.20" - "@babel/runtime-corejs3": "npm:7.25.0" + "@redwoodjs/framework-tools": "workspace:*" "@testing-library/dom": "npm:9.3.4" "@testing-library/jest-dom": "npm:6.4.8" "@testing-library/react": "npm:14.3.1" @@ -8211,13 +8209,15 @@ __metadata: "@types/pascalcase": "npm:1.0.3" "@types/react": "npm:^18.2.55" "@types/react-dom": "npm:^18.2.19" - core-js: "npm:3.38.0" + concurrently: "npm:8.2.2" graphql: "npm:16.9.0" nodemon: "npm:3.1.4" pascalcase: "npm:1.0.0" + publint: "npm:0.2.10" react: "npm:19.0.0-rc-8269d55d-20240802" react-dom: "npm:19.0.0-rc-8269d55d-20240802" react-hook-form: "npm:7.52.2" + tsx: "npm:4.17.0" typescript: "npm:5.5.4" vitest: "npm:2.0.5" peerDependencies: @@ -8343,6 +8343,25 @@ __metadata: languageName: unknown linkType: soft +"@redwoodjs/jobs@workspace:packages/jobs": + version: 0.0.0-use.local + resolution: "@redwoodjs/jobs@workspace:packages/jobs" + dependencies: + "@prisma/client": "npm:5.18.0" + "@redwoodjs/cli-helpers": "workspace:*" + "@redwoodjs/framework-tools": "workspace:*" + "@redwoodjs/project-config": "workspace:*" + concurrently: "npm:8.2.2" + publint: "npm:0.2.10" + tsx: "npm:4.17.0" + 
typescript: "npm:5.5.4" + vitest: "npm:2.0.5" + bin: + rw-jobs: ./dist/bins/rw-jobs.js + rw-jobs-worker: ./dist/bins/rw-jobs-worker.js + languageName: unknown + linkType: soft + "@redwoodjs/mailer-core@workspace:*, @redwoodjs/mailer-core@workspace:packages/mailer/core": version: 0.0.0-use.local resolution: "@redwoodjs/mailer-core@workspace:packages/mailer/core" @@ -8456,9 +8475,8 @@ __metadata: version: 0.0.0-use.local resolution: "@redwoodjs/prerender@workspace:packages/prerender" dependencies: - "@babel/cli": "npm:7.24.8" - "@babel/core": "npm:^7.22.20" - "@babel/runtime-corejs3": "npm:7.25.0" + "@redwoodjs/babel-config": "workspace:*" + "@redwoodjs/framework-tools": "workspace:*" "@redwoodjs/project-config": "workspace:*" "@redwoodjs/router": "workspace:*" "@redwoodjs/structure": "workspace:*" @@ -8468,9 +8486,11 @@ __metadata: babel-plugin-ignore-html-and-css-imports: "npm:0.1.0" babel-plugin-tester: "npm:11.0.4" cheerio: "npm:1.0.0" - core-js: "npm:3.38.0" + concurrently: "npm:8.2.2" graphql: "npm:16.9.0" mime-types: "npm:2.1.35" + publint: "npm:0.2.10" + tsx: "npm:4.17.0" typescript: "npm:5.5.4" vitest: "npm:2.0.5" peerDependencies: @@ -8533,14 +8553,15 @@ __metadata: version: 0.0.0-use.local resolution: "@redwoodjs/record@workspace:packages/record" dependencies: - "@babel/cli": "npm:7.24.8" - "@babel/core": "npm:^7.22.20" - "@babel/runtime-corejs3": "npm:7.25.0" "@prisma/client": "npm:5.18.0" "@prisma/internals": "npm:5.18.0" + "@redwoodjs/api": "workspace:*" + "@redwoodjs/framework-tools": "workspace:*" "@redwoodjs/project-config": "workspace:*" - core-js: "npm:3.38.0" + camelcase: "npm:6.3.0" esbuild: "npm:0.23.0" + publint: "npm:0.2.10" + tsx: "npm:4.17.0" vitest: "npm:2.0.5" languageName: unknown linkType: soft
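
Taken together, the pieces above define a project-side contract: `loadJobsManager()` imports `api/dist/lib/jobs` and expects a `jobs` export, `loadJob()` expects each job file under `api/src/jobs` to export an object whose name matches the filename (otherwise `JobExportNotFoundError`), and `JobManager`'s `createScheduler()`/`createJob()` tie the two together. A rough sketch of what those project files might look like follows, for orientation only; the `db` import and the exact `PrismaAdapter` constructor options are assumptions for illustration and are not confirmed by this diff.

```ts
// api/src/lib/jobs.ts (hypothetical sketch)
// `loadJobsManager()` above imports this module and requires a `jobs` export.
import { JobManager, PrismaAdapter } from '@redwoodjs/jobs'

import { db } from 'src/lib/db' // assumption: the project's Prisma client
import { logger } from 'src/lib/logger'

export const jobs = new JobManager({
  adapters: {
    // assumption: the adapter wraps the Prisma client and reuses the logger
    prisma: new PrismaAdapter({ db, logger }),
  },
  queues: ['default'],
  logger,
  workers: [
    {
      adapter: 'prisma',
      queue: '*', // work every queue, per WorkerConfig above
      count: 1,
    },
  ],
})

// Schedulers are plain functions, scheduler(job, args, options), exactly the
// shape exercised in JobManager.test.ts.
export const later = jobs.createScheduler({ adapter: 'prisma' })

// api/src/jobs/SampleJob/SampleJob.ts (hypothetical sketch)
// In a real project this file would `import { jobs } from 'src/lib/jobs'`.
// Per JobExportNotFoundError, the export name must match the filename so that
// `loadJob()` can resolve it from api/dist/jobs.
export const SampleJob = jobs.createJob({
  queue: 'default',
  priority: 50,
  perform: async (userId: number) => {
    logger.info(`SampleJob: processing user ${userId}`)
  },
})

// Scheduling it to run no earlier than five minutes from now:
// await later(SampleJob, [123], { wait: 300 })
```

Note that `queue` must be one of the strings passed to `queues`, and that `Scheduler.buildPayload()` throws `QueueNotDefinedError` when a job carries no queue at all.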