module Spago.Command.Fetch
( PackageTransitiveDeps
, FetchEnv
, FetchEnvRow
, FetchOpts
, toAllDependencies
, getWorkspacePackageDeps
, getTransitiveDeps
, getTransitiveDepsFromRegistry
, run
) where
import Spago.Prelude
import Affjax.Node as Http
import Affjax.ResponseFormat as Response
import Affjax.StatusCode (StatusCode(..))
import Control.Monad.State as State
import Data.Array as Array
import Data.Array.NonEmpty as NEA
import Data.Codec.Argonaut as CA
import Data.Either as Either
import Data.HTTP.Method as Method
import Data.Int as Int
import Data.Map as Map
import Data.Newtype (wrap)
import Data.Set as Set
import Data.Traversable (sequence)
import Effect.Ref as Ref
import Node.Buffer as Buffer
import Node.Encoding as Encoding
import Node.Path as Path
import Registry.Internal.Codec as Internal.Codec
import Registry.Metadata as Metadata
import Registry.PackageName as PackageName
import Registry.Range as Range
import Registry.Sha256 as Sha256
import Registry.Solver as Registry.Solver
import Registry.Version as Registry.Version
import Registry.Version as Version
import Spago.Config (BuildType(..), Dependencies(..), GitPackage, Package(..), PackageMap, Workspace, WorkspacePackage)
import Spago.Config as Config
import Spago.Db as Db
import Spago.FS as FS
import Spago.Git as Git
import Spago.Lock (LockEntryData(..))
import Spago.Lock as Lock
import Spago.Paths as Paths
import Spago.Purs as Purs
import Spago.Registry as Registry
import Spago.Repl as Repl
import Spago.Tar as Tar
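-- | The transitive dependencies of each workspace package, keyed by the workspace package's name.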
type PackageTransitiveDeps = Map PackageName PackageMap
type FetchEnvRow a =
( getRegistry :: Spago (Registry.PreRegistryEnv ()) Registry.RegistryFunctions
, workspace :: Workspace
, logOptions :: LogOptions
, offline :: OnlineStatus
, purs :: Purs.Purs
, git :: Git.Git
, db :: Db.Db
| a
)
type FetchEnv a = Record (FetchEnvRow a)
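-- | Options for fetching: the packages to install (if any), whether to add version
-- | ranges to the configuration, and whether we are fetching for the tests or the REPL.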
type FetchOpts =
{ packages :: Array PackageName
, ensureRanges :: Boolean
, isTest :: Boolean
, isRepl :: Boolean
}
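-- | Computes the transitive dependencies of every workspace package, optionally
-- | adding the requested packages (and version ranges) to the selected package's
-- | configuration, writes a lockfile if none is present, downloads any missing
-- | dependencies into the local cache, and returns the dependencies split by
-- | workspace package.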
run
:: forall a
. FetchOpts
-> Spago (FetchEnv a) PackageTransitiveDeps
run { packages: packagesToInstall, ensureRanges, isTest, isRepl } = do
logDebug $ "Requested to install these packages: " <> printJson (CA.array PackageName.codec) packagesToInstall
{ workspace: currentWorkspace, offline } <- ask
let
installingPackages = not $ Array.null packagesToInstall
-- If we need to install new packages then we drop the current lockfile, since we're going to need a new one
workspace = case installingPackages of
false -> currentWorkspace
true -> currentWorkspace { packageSet = currentWorkspace.packageSet { lockfile = Nothing } }
getPackageConfigPath errorMessageEnd = do
case workspace.selected of
Just { path, doc, package } -> pure { configPath: Path.concat [ path, "spago.yaml" ], yamlDoc: doc, package }
Nothing -> case workspace.rootPackage of
Just rootPackage -> do pure { configPath: "spago.yaml", yamlDoc: workspace.doc, package: rootPackage }
Nothing -> die
[ "No package found in the root configuration."
, "Please use the `-p` flag to select a package " <> errorMessageEnd
]
-- We compute the transitive deps for all the packages in the workspace, but keep them
-- split by package - we need all of them so we can stash them in the lockfile, but we
-- are only going to download the ones we actually need, e.g. if there's a package selected
dependencies <- traverse getTransitiveDeps
$ Map.fromFoldable
$ map
( \p -> Tuple p.package.name case workspace.selected of
-- If we are installing packages, we need to add the new deps to the selected package
Just selected | selected.package.name == p.package.name -> case isTest of
false -> p { package { dependencies = p.package.dependencies <> Dependencies (Map.fromFoldable $ map (_ /\ Nothing) packagesToInstall) } }
true -> p { package { test = p.package.test # map (\t -> t { dependencies = t.dependencies <> Dependencies (Map.fromFoldable $ map (_ /\ Nothing) packagesToInstall) }) } }
_ -> p
)
$ Config.getWorkspacePackages workspace.packageSet
-- write to the config file if we are adding new packages
when installingPackages do
{ configPath, package, yamlDoc } <- getPackageConfigPath "to install your packages in."
let packageDependencies = Map.keys $ unwrap package.dependencies
-- Prevent users from installing a circular dependency
let packages = Array.filter (\p -> p /= package.name) packagesToInstall
let overlappingPackages = Set.intersection packageDependencies (Set.fromFoldable packages)
unless (Set.isEmpty overlappingPackages) do
logWarn
$ [ toDoc "You tried to install some packages that are already present in the configuration, proceeding anyways:" ]
<> map (indent <<< toDoc <<< append "- " <<< PackageName.print) (Array.fromFoldable overlappingPackages)
logInfo $ "Adding " <> show (Array.length packages) <> " packages to the config in " <> configPath
liftEffect $ Config.addPackagesToConfig yamlDoc isTest packages
liftAff $ FS.writeYamlDocFile configPath yamlDoc
-- if the flag is selected, we kick off the process of adding ranges to the config
when ensureRanges do
{ configPath, package, yamlDoc } <- getPackageConfigPath "in which to add ranges."
logInfo $ "Adding ranges to dependencies to the config in " <> configPath
packageDeps <- (Map.lookup package.name dependencies) `justOrDieWith`
"Impossible: package dependencies must be in dependencies map"
let rangeMap = map getRangeFromPackage packageDeps
liftEffect $ Config.addRangesToConfig yamlDoc rangeMap
liftAff $ FS.writeYamlDocFile configPath yamlDoc
-- the repl needs a support package, so we add it here as a sidecar
supportPackage <- Repl.supportPackage workspace.packageSet
let
allTransitiveDeps = case isRepl of
false -> dependencies
true -> map (\packageMap -> Map.union packageMap supportPackage) dependencies
depsToFetch <- case workspace.selected of
Nothing -> pure (toAllDependencies allTransitiveDeps)
-- If there's a package selected, we only fetch the transitive deps for that one
Just p -> case Map.lookup p.package.name dependencies of
Nothing -> die "Impossible: package dependencies must be in dependencies map"
Just deps -> pure $ Map.union deps if isRepl then supportPackage else Map.empty
when (isNothing workspace.packageSet.lockfile) do
logInfo "No lockfile found, generating one..."
lockfile <- mkLockfile allTransitiveDeps
liftAff $ FS.writeYamlFile Lock.lockfileCodec "spago.lock" lockfile
logInfo "Lockfile written to spago.lock. Please commit this file."
-- then for every package we have we try to download it, and copy it into the local cache
logInfo "Downloading dependencies..."
parallelise $ (flip map) (Map.toUnfoldable depsToFetch :: Array (Tuple PackageName Package)) \(Tuple name package) -> do
let localPackageLocation = Config.getPackageLocation name package
-- first of all, we check if we have the package in the local cache. If so, we don't even do the work
unlessM (FS.exists localPackageLocation) case package of
GitPackage gitPackage -> getGitPackageInLocalCache name gitPackage
RegistryVersion v -> do
-- if the version comes from the registry then we have a longer list of things to do
let versionString = Registry.Version.print v
let packageVersion = PackageName.print name <> "@" <> versionString
-- get the metadata for the package, so we have access to the hash and other info
metadata <- Registry.getMetadata name
case (metadata >>= (\(Metadata meta) -> Either.note "Didn't find version in the metadata file" $ Map.lookup v meta.published)) of
Left err -> die $ "Couldn't read metadata, reason:\n " <> err
Right versionMetadata -> do
logDebug $ "Metadata read: " <> printJson Metadata.publishedMetadataCodec versionMetadata
-- then check if we have a tarball cached. If not, download it
let globalCachePackagePath = Path.concat [ Paths.globalCachePath, "packages", PackageName.print name ]
let archivePath = Path.concat [ globalCachePackagePath, versionString <> ".tar.gz" ]
FS.mkdirp globalCachePackagePath
-- We need to see if the tarball is there, and if we can decompress it.
-- This is because if Spago is killed while it's writing the tar, then it might leave it corrupted.
-- By checking whether it's broken we can redownload it here if needed.
tarExists <- FS.exists archivePath
-- unpack the tar in a temp folder, then move it to the local cache
let tarInnerFolder = PackageName.print name <> "-" <> Version.print v
tempDir <- mkTemp
FS.mkdirp tempDir
tarIsGood <-
if tarExists then do
logDebug $ "Trying to unpack archive to temp folder: " <> tempDir
map (either (const false) (const true)) $ liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }
else
pure false
case tarExists, tarIsGood, offline of
true, true, _ -> pure unit -- Tar exists and is good, and we already unpacked it. Happy days!
_, _, Offline -> die $ "Package " <> packageVersion <> " is not in the local cache, and Spago is running in offline mode - can't make progress."
_, _, Online -> do
let packageUrl = "https://packages.registry.purescript.org/" <> PackageName.print name <> "/" <> versionString <> ".tar.gz"
logInfo $ "Fetching package " <> packageVersion
response <- liftAff $ withBackoff' $ Http.request
( Http.defaultRequest
{ method = Left Method.GET
, responseFormat = Response.arrayBuffer
, url = packageUrl
}
)
case response of
Nothing -> die $ "Couldn't reach the registry at " <> packageUrl
Just (Left err) -> die $ "Couldn't fetch package " <> packageVersion <> ":\n " <> Http.printError err
Just (Right { status, body }) | status /= StatusCode 200 -> do
(buf :: Buffer) <- liftEffect $ Buffer.fromArrayBuffer body
bodyString <- liftEffect $ Buffer.toString Encoding.UTF8 buf
die $ "Couldn't fetch package " <> packageVersion <> ", status was not ok " <> show status <> ", got answer:\n " <> bodyString
Just (Right r@{ body: archiveArrayBuffer }) -> do
logDebug $ "Got status: " <> show r.status
-- check the size and hash of the tar against the metadata
archiveBuffer <- liftEffect $ Buffer.fromArrayBuffer archiveArrayBuffer
archiveSize <- liftEffect $ Buffer.size archiveBuffer
archiveSha <- liftEffect $ Sha256.hashBuffer archiveBuffer
unless (Int.toNumber archiveSize == versionMetadata.bytes) do
die $ "Archive fetched for " <> packageVersion <> " has a different size (" <> show archiveSize <> ") than expected (" <> show versionMetadata.bytes <> ")"
unless (archiveSha == versionMetadata.hash) do
die $ "Archive fetched for " <> packageVersion <> " has a different hash (" <> Sha256.print archiveSha <> ") than expected (" <> Sha256.print versionMetadata.hash <> ")"
-- if everything's alright we stash the tar in the global cache
logDebug $ "Fetched archive for " <> packageVersion <> ", saving it in the global cache: " <> archivePath
FS.writeFile archivePath archiveBuffer
logDebug $ "Unpacking archive to temp folder: " <> tempDir
(liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }) >>= case _ of
Right _ -> pure unit
Left err -> die [ "Failed to decode downloaded package " <> packageVersion <> ", error:", show err ]
logDebug $ "Moving extracted file to local cache:" <> localPackageLocation
FS.moveSync { src: (Path.concat [ tempDir, tarInnerFolder ]), dst: localPackageLocation }
-- Local package, no work to be done
LocalPackage _ -> pure unit
WorkspacePackage _ -> pure unit
pure dependencies
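-- | Intermediate state while assembling the lockfile: the workspace packages and the
-- | external packages resolved so far, each annotated with the workspace packages that need them.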
type LockfileBuilderResult =
{ workspacePackages :: Map PackageName Lock.WorkspaceLockPackage
, packages :: Map PackageName Lock.LockEntry
}
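-- | Builds the lockfile from the transitive dependencies of all workspace packages,
-- | recording for every dependency where it comes from (registry, git, local path or
-- | workspace) and which workspace packages need it.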
mkLockfile :: forall a. PackageTransitiveDeps -> Spago (FetchEnv a) Lock.Lockfile
mkLockfile allTransitiveDeps = do
{ workspace } <- ask
let
processPackage :: LockfileBuilderResult -> Tuple PackageName (Tuple PackageName Package) -> Spago (FetchEnv a) LockfileBuilderResult
processPackage result (Tuple workspacePackageName (Tuple dependencyName dependencyPackage)) = do
(packageDependencies :: Array PackageName) <- (Array.fromFoldable <<< Map.keys <<< fromMaybe Map.empty)
<$> getPackageDependencies dependencyName dependencyPackage
let
updatePackage package = pure $ result
{ packages = Map.alter
( case _ of
Nothing -> Just { needed_by: NEA.singleton workspacePackageName, package }
Just { needed_by } -> Just { needed_by: NEA.cons workspacePackageName needed_by, package }
)
dependencyName
result.packages
}
updateWorkspacePackage otherWorkspacePackage = pure $ result
{ workspacePackages = Map.alter
( case _ of
Nothing -> Just $ otherWorkspacePackage { needed_by = Array.singleton workspacePackageName }
Just pkg@{ needed_by } -> Just $ pkg { needed_by = Array.cons workspacePackageName needed_by }
)
dependencyName
result.workspacePackages
}
case dependencyPackage of
WorkspacePackage pkg -> updateWorkspacePackage (snd $ Config.workspacePackageToLockfilePackage pkg)
GitPackage gitPackage -> do
let packageLocation = Config.getPackageLocation dependencyName dependencyPackage
Git.getRef (Just packageLocation) >>= case _ of
Left err -> die err -- TODO maybe not die here?
Right rev -> updatePackage $ FromGit { rev, dependencies: packageDependencies, url: gitPackage.git, subdir: gitPackage.subdir }
RegistryVersion version -> do
metadata <- Registry.getMetadata dependencyName
registryVersion <- case (metadata >>= (\(Metadata meta) -> Either.note "Didn't find version in the metadata file" $ Map.lookup version meta.published)) of
Left err -> die $ "Couldn't read metadata, reason:\n " <> err
Right { hash: integrity } ->
pure { version, integrity, dependencies: packageDependencies }
updatePackage $ FromRegistry registryVersion
LocalPackage { path } -> do
updatePackage $ FromPath { path, dependencies: packageDependencies }
let
toArray :: forall k v. Map k v -> Array (Tuple k v)
toArray = Map.toUnfoldable
({ packages, workspacePackages } :: LockfileBuilderResult) <-
Array.foldM processPackage
{ workspacePackages: Map.fromFoldable $ map Config.workspacePackageToLockfilePackage (Config.getWorkspacePackages workspace.packageSet), packages: Map.empty }
(foldMap sequence $ toArray $ map toArray allTransitiveDeps)
pure
{ packages
, workspace:
{ package_set: case workspace.packageSet.buildType of
RegistrySolverBuild _ -> Nothing
PackageSetBuild info _ -> Just info
, packages: workspacePackages
, extra_packages: fromMaybe Map.empty workspace.workspaceConfig.extra_packages
}
}
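-- | Flattens the per-package transitive dependencies into a single package map.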
toAllDependencies :: PackageTransitiveDeps -> PackageMap
toAllDependencies = foldl (Map.unionWith (\l _ -> l)) Map.empty
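-- | Clones a git package into a temporary directory, then moves it into the local cache.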
getGitPackageInLocalCache :: forall a. PackageName -> GitPackage -> Spago (Git.GitEnv a) Unit
getGitPackageInLocalCache name package = do
let localPackageLocation = Config.getPackageLocation name (GitPackage package)
tempDir <- mkTemp' (Just $ printJson Config.gitPackageCodec package)
logDebug $ "Cloning repo in " <> tempDir
Git.fetchRepo package tempDir >>= case _ of
Left err -> die err
Right _ -> do
logDebug $ "Repo cloned. Moving to " <> localPackageLocation
FS.mkdirp $ Path.concat [ Paths.localCachePackagesPath, PackageName.print name ]
FS.moveSync { src: tempDir, dst: localPackageLocation }
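-- | Returns the direct dependencies (with their ranges) of a package, reading them from
-- | the registry index, the package's own configuration, or the workspace, depending on
-- | where the package comes from.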
getPackageDependencies :: forall a. PackageName -> Package -> Spago (FetchEnv a) (Maybe (Map PackageName Range))
getPackageDependencies packageName package = case package of
RegistryVersion v -> do
maybeManifest <- Registry.getManifestFromIndex packageName v
pure $ map (_.dependencies <<< unwrap) maybeManifest
GitPackage p -> do
-- Note: we fetch the package into the local cache in any case,
-- so we have guarantees about being able to fetch it
let packageLocation = Config.getPackageLocation packageName package
unlessM (FS.exists packageLocation) do
getGitPackageInLocalCache packageName p
case p.dependencies of
Just (Dependencies dependencies) -> pure (Just (map (fromMaybe Config.widestRange) dependencies))
Nothing -> do
readLocalDependencies case p.subdir of
Nothing -> packageLocation
Just s -> Path.concat [ packageLocation, s ]
LocalPackage p -> do
readLocalDependencies p.path
WorkspacePackage p ->
pure (Just (map (fromMaybe Config.widestRange) (unwrap $ getWorkspacePackageDeps p)))
where
-- try to see if the package has a spago config, and if it's there we read it
readLocalDependencies :: FilePath -> Spago (FetchEnv a) (Maybe (Map PackageName Range))
readLocalDependencies configLocation = do
-- TODO: make this work with manifests
Config.readConfig (Path.concat [ configLocation, "spago.yaml" ]) >>= case _ of
Right { yaml: { package: Just { dependencies: (Dependencies deps) } } } -> do
pure (Just (map (fromMaybe Config.widestRange) deps))
Right _ -> die [ "Read valid configuration from " <> configLocation, "However, there was no `package` section to be read." ]
Left err -> die [ "Could not read config at " <> configLocation, "Error: " <> err ]
getWorkspacePackageDeps :: WorkspacePackage -> Dependencies
getWorkspacePackageDeps pkg =
if pkg.hasTests then
pkg.package.dependencies <> fromMaybe mempty (map _.dependencies pkg.package.test)
else pkg.package.dependencies
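-- | Accumulator for the package set traversal: the packages resolved so far, plus any
-- | cycles and missing packages encountered along the way.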
type TransitiveDepsResult =
{ packages :: Map PackageName Package
, errors ::
{ cycle :: Set PackageName
, notInIndex :: Set PackageName
, notInPackageSet :: Set PackageName
}
}
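-- | Computes the transitive dependencies of a workspace package, either by reading them
-- | back from the lockfile or by building a fresh plan from the registry solver or the
-- | package set.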
getTransitiveDeps :: forall a. Config.WorkspacePackage -> Spago (FetchEnv a) PackageMap
getTransitiveDeps workspacePackage = do
let depsRanges = map (fromMaybe Config.widestRange) (unwrap $ getWorkspacePackageDeps workspacePackage)
{ workspace } <- ask
case workspace.packageSet.lockfile of
-- If we have a lockfile we can just use that - we don't even need to build a plan, we can just filter the packages
-- marked as needed by the workspace package that we are processing, as we just dumped the plan in the lockfile.
Just lockfile -> do
let
allWorkspacePackages = Map.fromFoldable $ map (\p -> Tuple p.package.name (WorkspacePackage p)) (Config.getWorkspacePackages workspace.packageSet)
-- Need to filter the allWorkspacePackages by needed_by as well
workspacePackagesWeNeed = allWorkspacePackages # Map.filterWithKey \name _package -> case Map.lookup name lockfile.workspace.packages of
Nothing -> false
Just { needed_by } -> Array.elem workspacePackage.package.name needed_by
otherPackages = map (fromLockEntryData <<< _.package) $ Map.filter (\{ needed_by } -> NEA.elem workspacePackage.package.name needed_by) lockfile.packages
pure $ Map.union otherPackages workspacePackagesWeNeed
-- No lockfile, we need to build a plan from scratch, and hit the Registry and so on
Nothing -> case workspace.packageSet.buildType of
RegistrySolverBuild extraPackages -> do
plan <- getTransitiveDepsFromRegistry depsRanges extraPackages
logDebug $ "Got a plan from the Solver: " <> printJson (Internal.Codec.packageMap Version.codec) plan
pure (map RegistryVersion plan)
PackageSetBuild _info set -> getTransitiveDepsFromPackageSet set (Array.fromFoldable $ Map.keys depsRanges)
where
-- Note: here we can safely discard the dependencies because we don't need to bother building a plan,
-- we already built it when the lockfile was put together in the first place. All the dependency info is there so
-- that other things can use it (e.g. Nix), but Spago is not going to need it at this point.
fromLockEntryData :: LockEntryData -> Package
fromLockEntryData = case _ of
FromPath { path } -> LocalPackage { path }
FromRegistry { version } -> RegistryVersion version
FromGit { rev, dependencies, url, subdir } -> GitPackage
{ ref: rev
, dependencies: Just $ wrap $ Map.fromFoldable $ map (\p -> Tuple p Nothing) dependencies
, git: url
, subdir
}
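-- | Asks the registry solver for a build plan satisfying the given ranges, consulting
-- | the extra packages (workspace packages and overrides) before falling back to the
-- | registry metadata and manifests.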
getTransitiveDepsFromRegistry :: forall a. Map PackageName Range -> PackageMap -> Spago (FetchEnv a) (Map PackageName Version)
getTransitiveDepsFromRegistry depsRanges extraPackages = do
let
loader :: PackageName -> Spago (FetchEnv a) (Map Version (Map PackageName Range))
loader packageName = do
-- First look it up in the extra packages, as those contain the workspace packages and any overrides
case Map.lookup packageName extraPackages of
Just p -> map (Map.singleton (getVersionFromPackage p) <<< fromMaybe Map.empty) $ getPackageDependencies packageName p
Nothing -> do
maybeMetadata <- Registry.getMetadata packageName
let
versions = case maybeMetadata of
Right (Metadata metadata) -> Array.fromFoldable $ Map.keys metadata.published
Left _err -> []
map (Map.fromFoldable :: Array _ -> Map _ _) $ for versions \v -> do
maybeManifest <- Registry.getManifestFromIndex packageName v
let deps = fromMaybe Map.empty $ map (_.dependencies <<< unwrap) maybeManifest
pure (Tuple v deps)
maybePlan <- Registry.Solver.loadAndSolve loader depsRanges
case maybePlan of
Left errs -> die
[ toDoc "Could not solve the package dependencies, errors:"
, indent $ toDoc $ Array.fromFoldable $ map Registry.Solver.printSolverError errs
]
Right (buildPlan :: Map PackageName Version) -> do
pure buildPlan
-- | Return the transitive dependencies of a list of packages
getTransitiveDepsFromPackageSet :: forall a. PackageMap -> Array PackageName -> Spago (FetchEnv a) PackageMap
getTransitiveDepsFromPackageSet packageSet deps = do
logDebug "Getting transitive deps"
packageDependenciesCache <- liftEffect $ Ref.new Map.empty
let
memoisedGetPackageDependencies :: PackageName -> Package -> Spago (FetchEnv a) (Maybe (Map PackageName Range))
memoisedGetPackageDependencies packageName package = do
cache <- liftEffect $ Ref.read packageDependenciesCache
case Map.lookup packageName cache of
Just cached -> pure cached
Nothing -> do
-- Not cached. Compute it, write to ref, return it
res <- getPackageDependencies packageName package
liftEffect $ Ref.modify_ (Map.insert packageName res) packageDependenciesCache
pure res
printPackageError :: PackageName -> String
printPackageError p = " - " <> PackageName.print p <> "\n"
init :: TransitiveDepsResult
init = { packages: (Map.empty :: Map PackageName Package), errors: mempty }
go :: Set PackageName -> PackageName -> StateT TransitiveDepsResult (Spago (FetchEnv a)) Unit
go seen dep = do
-- We stash packages that we encountered along the way in `seen`,
-- so if we see it again we have a cycle
if Set.member dep seen then do
State.modify_ $ cycleError dep
else do
-- If the package is a transitive dependency of some other package that
-- we already met, then we don't need to do the work again
alreadyRun <- Map.member dep <$> State.gets _.packages
when (not alreadyRun)
-- If we need to compute the dependencies from scratch instead, we first look
-- in the package set to get a version number out, then use that version to
-- look it up in the index and get the dependencies
case Map.lookup dep packageSet of
Nothing -> State.modify_ $ notInPackageSetError dep
Just package -> do
maybeDeps <- State.lift $ memoisedGetPackageDependencies dep package
case maybeDeps of
Nothing -> State.modify_ $ notInIndexError dep
Just dependenciesMap -> do
-- Compare errors before and after recursively running transitive deps
errors <- State.gets _.errors
-- recur here, as we need to get the transitive tree, not just the first level
void $ forWithIndex dependenciesMap
(\dependency _ -> go (Set.insert dep seen) dependency)
-- Errors may have changed after running through the child deps
errorsAfterTransitiveDeps <- State.gets _.errors
-- Do not include the package if any child deps fail
when (errors == errorsAfterTransitiveDeps) do
State.modify_ \st -> st { packages = Map.insert dep package st.packages }
{ packages, errors } <-
State.execStateT
(for deps (go mempty))
init
when (not (Set.isEmpty errors.cycle)) do
die $ "The following packages have circular dependencies:\n" <> foldMap printPackageError (Set.toUnfoldable errors.cycle :: Array PackageName)
when (not (Set.isEmpty errors.notInPackageSet)) do
die $ "The following packages do not exist in your package set:\n" <> foldMap printPackageError errors.notInPackageSet
when (not (Set.isEmpty errors.notInIndex)) do
die $ "The following packages do not exist in the package index:\n" <> foldMap printPackageError errors.notInIndex
pure packages
-- | Given a Package, figure out a reasonable range.
-- | We default to the widest range for packages that are not pointing to the Registry.
getRangeFromPackage :: Package -> Range
getRangeFromPackage = case _ of
RegistryVersion v -> Range.caret v
_ -> Config.widestRange
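-- | Extracts the version of a package, defaulting to 0.0.0 for packages that do not
-- | come from the registry.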
getVersionFromPackage :: Package -> Version
getVersionFromPackage = case _ of
RegistryVersion v -> v
_ -> unsafeFromRight $ Version.parse "0.0.0"
notInPackageSetError :: PackageName -> TransitiveDepsResult -> TransitiveDepsResult
notInPackageSetError dep result = result
{ errors { notInPackageSet = Set.insert dep result.errors.notInPackageSet } }
notInIndexError :: PackageName -> TransitiveDepsResult -> TransitiveDepsResult
notInIndexError dep result = result
{ errors { notInIndex = Set.insert dep result.errors.notInIndex } }
cycleError :: PackageName -> TransitiveDepsResult -> TransitiveDepsResult
cycleError dep result = result
{ errors { cycle = Set.insert dep result.errors.cycle } }