@@ -20,7 +20,6 @@ import Registry.App.Prelude
 
 import Affjax.Node as Affjax.Node
 import Affjax.ResponseFormat as ResponseFormat
-import Affjax.StatusCode (StatusCode(..))
 import Data.Array as Array
 import Data.Exists as Exists
 import Data.HTTP.Method (Method(..))
@@ -34,6 +33,7 @@ import Registry.App.Effect.Cache as Cache
 import Registry.App.Effect.Log (LOG)
 import Registry.App.Effect.Log as Log
 import Registry.Constants as Constants
+import Registry.Foreign.S3 (Space)
 import Registry.Foreign.S3 as S3
 import Registry.PackageName as PackageName
 import Registry.Version as Version
@@ -106,13 +106,13 @@ connectS3 key = do
   let bucket = "purescript-registry"
   let space = "ams3.digitaloceanspaces.com"
   Log.debug $ "Connecting to the bucket " <> bucket <> " at space " <> space <> " with public key " <> key.key
-  Run.liftAff (withBackoff' (Aff.attempt (S3.connect key "ams3.digitaloceanspaces.com" bucket))) >>= case _ of
-    Nothing ->
+  Run.liftAff (withRetryOnTimeout (Aff.attempt (S3.connect key "ams3.digitaloceanspaces.com" bucket))) >>= case _ of
+    Cancelled ->
       Except.throw "Timed out when attempting to connect to S3 storage backend."
-    Just (Left err) -> do
+    Failed err -> do
       Log.error $ "Failed to connect to S3 due to an exception: " <> Aff.message err
       Except.throw "Could not connect to storage backend."
-    Just (Right connection) -> do
+    Succeeded connection -> do
       Log.debug "Connected to S3!"
       pure connection
 
@@ -126,17 +126,10 @@ handleS3 :: forall r a. S3Env -> Storage a -> Run (LOG + AFF + EFFECT + r) a
 handleS3 env = Cache.interpret _storageCache (Cache.handleFs env.cache) <<< case _ of
   Query name reply -> map (map reply) Except.runExcept do
     s3 <- connectS3 env.s3
-    resources <- Run.liftAff (withBackoff' (S3.listObjects s3 { prefix: PackageName.print name <> "/" })) >>= case _ of
-      Nothing -> do
-        Log.error $ "Failed to list S3 objects for " <> PackageName.print name <> " because the process timed out."
-        Except.throw $ "Could not upload package " <> PackageName.print name <> " due to an error connecting to the storage backend."
-      Just objects ->
-        pure $ map _.key objects
-    pure $ Set.fromFoldable
-      $ resources
-          >>= \resource -> do
-              { name: parsedName, version } <- Array.fromFoldable $ parsePackagePath resource
-              version <$ guard (name == parsedName)
+    resources <- Except.rethrow =<< Run.liftAff (withRetryListObjects s3 name)
+    pure $ Set.fromFoldable $ resources >>= \resource -> do
+      { name: parsedName, version } <- Array.fromFoldable $ parsePackagePath resource
+      version <$ guard (name == parsedName)
 
   Download name version path reply -> map (map reply) Except.runExcept do
     let package = formatPackageVersion name version
@@ -167,24 +160,21 @@ handleS3 env = Cache.interpret _storageCache (Cache.handleFs env.cache) <<< case
 
     Log.debug $ "Read file for " <> package <> ", now uploading to " <> packagePath <> "..."
     s3 <- connectS3 env.s3
-    published <- Run.liftAff (withBackoff' (S3.listObjects s3 { prefix: PackageName.print name <> "/" })) >>= case _ of
-      Nothing -> do
-        Log.error $ "Failed to list S3 objects for " <> PackageName.print name <> " because the process timed out."
-        Except.throw $ "Could not upload package " <> package <> " due to an error connecting to the storage backend."
-      Just objects ->
-        pure $ map _.key objects
-
+    published <- Except.rethrow =<< Run.liftAff (withRetryListObjects s3 name)
     if Array.elem packagePath published then do
       Log.error $ packagePath <> " already exists on S3."
       Except.throw $ "Could not upload " <> package <> " because a package at " <> formatPackageUrl name version <> " already exists."
     else do
       Log.debug $ "Uploading release to the bucket at path " <> packagePath
       let putParams = { key: packagePath, body: buffer, acl: S3.PublicRead }
-      Run.liftAff (withBackoff' (S3.putObject s3 putParams)) >>= case _ of
-        Nothing -> do
-          Log.error "Failed to put object to S3 because the process timed out."
+      Run.liftAff (withRetryOnTimeout (Aff.attempt (S3.putObject s3 putParams))) >>= case _ of
+        Cancelled -> do
+          Log.error "Failed to upload object to S3 because the process timed out."
+          Except.throw $ "Could not upload package " <> package <> " due to an error connecting to the storage backend."
+        Failed error -> do
+          Log.error $ "Failed to upload object to S3 because of an exception: " <> Aff.message error
           Except.throw $ "Could not upload package " <> package <> " due to an error connecting to the storage backend."
-        Just _ ->
+        Succeeded _ ->
           Log.info $ "Uploaded " <> package <> " to the bucket at path " <> packagePath
 
   Delete name version reply -> map (map reply) Except.runExcept do
@@ -194,21 +184,18 @@ handleS3 env = Cache.interpret _storageCache (Cache.handleFs env.cache) <<< case
 
     Log.debug $ "Deleting " <> package
     s3 <- connectS3 env.s3
-    published <- Run.liftAff (withBackoff' (S3.listObjects s3 { prefix: PackageName.print name <> "/" })) >>= case _ of
-      Nothing -> do
-        Log.error $ "Failed to delete " <> package <> " because the process timed out when attempting to list objects at " <> packagePath <> " from S3."
-        Except.throw $ "Could not delete " <> package <> " from the storage backend."
-      Just objects ->
-        pure $ map _.key objects
-
+    published <- Except.rethrow =<< Run.liftAff (withRetryListObjects s3 name)
     if Array.elem packagePath published then do
       Log.debug $ "Deleting release from the bucket at path " <> packagePath
      let deleteParams = { key: packagePath }
-      Run.liftAff (withBackoff' (S3.deleteObject s3 deleteParams)) >>= case _ of
-        Nothing -> do
+      Run.liftAff (withRetryOnTimeout (Aff.attempt (S3.deleteObject s3 deleteParams))) >>= case _ of
+        Cancelled -> do
           Log.error $ "Timed out when attempting to delete the release of " <> package <> " from S3 at the path " <> packagePath
           Except.throw $ "Could not delete " <> package <> " from the storage backend."
-        Just _ -> do
+        Failed error -> do
+          Log.error $ "Failed to delete object from S3 because of an exception: " <> Aff.message error
+          Except.throw $ "Could not delete package " <> package <> " due to an error connecting to the storage backend."
+        Succeeded _ -> do
           Log.debug $ "Deleted release of " <> package <> " from S3 at the path " <> packagePath
           pure unit
     else do
@@ -253,7 +240,7 @@ downloadS3 name version = do
     packageUrl = formatPackageUrl name version
 
   Log.debug $ "Downloading " <> package <> " from " <> packageUrl
-  response <- Run.liftAff $ withBackoff' $ Affjax.Node.request $ Affjax.Node.defaultRequest
+  response <- Run.liftAff $ withRetryRequest' $ Affjax.Node.defaultRequest
     { method = Left GET
     , responseFormat = ResponseFormat.arrayBuffer
     , url = packageUrl
@@ -262,22 +249,35 @@ downloadS3 name version = do
   -- TODO: Rely on the metadata to check the size and hash? Or do we not care
   -- for registry-internal operations?
   case response of
-    Nothing -> do
+    Cancelled -> do
       Log.error $ "Failed to download " <> package <> " from " <> packageUrl <> " because of a connection timeout."
       Except.throw $ "Failed to download " <> package <> " from the storage backend."
-    Just (Left error) -> do
+    Failed (AffjaxError error) -> do
       Log.error $ "Failed to download " <> package <> " from " <> packageUrl <> " because of an HTTP error: " <> Affjax.Node.printError error
       Except.throw $ "Could not download " <> package <> " from the storage backend."
-    Just (Right { status, body }) | status /= StatusCode 200 -> do
+    Failed (StatusError { status, body }) -> do
       buffer <- Run.liftEffect $ Buffer.fromArrayBuffer body
       bodyString <- Run.liftEffect $ Buffer.toString UTF8 (buffer :: Buffer)
-      Log.error $ "Failed to download " <> package <> " from " <> packageUrl <> " because of a non-200 status code (" <> show status <> ") with body " <> bodyString
+      Log.error $ "Failed to download " <> package <> " from " <> packageUrl <> " because of a bad status code (" <> show status <> ") with body " <> bodyString
       Except.throw $ "Could not download " <> package <> " from the storage backend."
-    Just (Right { body }) -> do
+    Succeeded { body } -> do
       Log.debug $ "Successfully downloaded " <> package <> " into a buffer."
       buffer :: Buffer <- Run.liftEffect $ Buffer.fromArrayBuffer body
       pure buffer
 
+withRetryListObjects :: Space -> PackageName -> Aff (Either String (Array String))
+withRetryListObjects s3 name = do
+  let package = PackageName.print name
+  result <- withRetry (defaultRetry { retryOnFailure = \attempt _ -> attempt < 3 }) do
+    Aff.attempt (S3.listObjects s3 { prefix: package <> "/" })
+  pure $ case result of
+    Cancelled -> do
+      Left $ "Failed to list S3 objects for " <> package <> " because the process timed out."
+    Failed error -> do
+      Left $ "Failed to list S3 objects for " <> package <> " because of an exception: " <> Aff.message error
+    Succeeded objects ->
+      pure $ map _.key objects
+
 -- | A key type for the storage cache. Only supports packages identified by
 -- | their name and version.
 data StorageCache (c :: Type -> Type -> Type) a = Package PackageName Version (c Buffer a)
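
Note: the RetryResult constructors (Cancelled, Failed, Succeeded) and the helpers withRetry, defaultRetry, withRetryOnTimeout, and withRetryRequest' used above are not defined in this diff; they come from elsewhere in the codebase (the hunk context suggests they are re-exported via Registry.App.Prelude). The sketch below is an illustration of the assumed shape, inferred only from how the diff uses these names: the module name, the RetryConfig record, and the bodies are assumptions, and the real helpers also enforce a per-attempt timeout (the source of the Cancelled case), which is omitted here.

-- Sketch only, not the registry's actual implementation: names are taken
-- from the diff; everything else is an assumption for illustration.
module RetrySketch where

import Prelude

import Data.Either (Either(..))
import Effect.Aff (Aff)

-- Outcome of running an action under a retry policy.
data RetryResult err a
  = Cancelled -- gave up without a definitive result (e.g. every attempt timed out)
  | Failed err -- the final attempt completed, but with an error
  | Succeeded a -- some attempt succeeded

-- Retry policy; only the field exercised by the diff is sketched here.
type RetryConfig err = { retryOnFailure :: Int -> err -> Boolean }

-- Default policy: never retry after a failure.
defaultRetry :: forall err. RetryConfig err
defaultRetry = { retryOnFailure: \_ _ -> false }

-- Run the action; after each Left, ask retryOnFailure (given the attempt
-- number) whether to try again, otherwise report Failed. A real version
-- would also race each attempt against a timeout and report Cancelled.
withRetry :: forall err a. RetryConfig err -> Aff (Either err a) -> Aff (RetryResult err a)
withRetry config action = go 1
  where
  go attempt = action >>= case _ of
    Right a -> pure (Succeeded a)
    Left err
      | config.retryOnFailure attempt err -> go (attempt + 1)
      | otherwise -> pure (Failed err)

Under this shape, withRetryOnTimeout would plausibly be withRetry with a policy that retries only on timeouts, and withRetryRequest' the same driver wrapped around an Affjax request whose failures are split into AffjaxError and StatusError, matching the patterns matched in downloadS3 above.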