Add path mapper definitions for upload locations
This change updates the path mapper to be able to specify upload management locations. This includes a startedat file, which contains the RFC3339-formatted start time of the upload, and the actual data file. Signed-off-by: Stephen J Day <stephen.day@docker.com>
							parent
							
								
									75c5916dde
								
							
						
					
					
						commit
						219bd48c24
					
				| 
						 | 
					@ -23,20 +23,26 @@ const storagePathVersion = "v2"
 | 
				
			||||||
// 						<manifests by tag name>
 | 
					// 						<manifests by tag name>
 | 
				
			||||||
// 					-> layers/
 | 
					// 					-> layers/
 | 
				
			||||||
// 						<layer links to blob store>
 | 
					// 						<layer links to blob store>
 | 
				
			||||||
 | 
					// 					-> uploads/<uuid>
 | 
				
			||||||
 | 
					// 						data
 | 
				
			||||||
 | 
					// 						startedat
 | 
				
			||||||
//			-> blob/<algorithm>
 | 
					//			-> blob/<algorithm>
 | 
				
			||||||
//				<split directory content addressable storage>
 | 
					//				<split directory content addressable storage>
 | 
				
			||||||
//
 | 
					//
 | 
				
			||||||
// There are few important components to this path layout. First, we have the
 | 
					// There are few important components to this path layout. First, we have the
 | 
				
			||||||
// repository store identified by name. This contains the image manifests and
 | 
					// repository store identified by name. This contains the image manifests and
 | 
				
			||||||
// a layer store with links to CAS blob ids. Outside of the named repo area,
 | 
					// a layer store with links to CAS blob ids. Upload coordination data is also
 | 
				
			||||||
// we have the blob store. It contains the actual layer data and any other
 | 
					// stored here. Outside of the named repo area, we have the blob store. It
 | 
				
			||||||
// data that can be referenced by a CAS id.
 | 
					// contains the actual layer data and any other data that can be referenced by
 | 
				
			||||||
 | 
					// a CAS id.
 | 
				
			||||||
//
 | 
					//
 | 
				
			||||||
// We cover the path formats implemented by this path mapper below.
 | 
					// We cover the path formats implemented by this path mapper below.
 | 
				
			||||||
//
 | 
					//
 | 
				
			||||||
// 	manifestPathSpec: <root>/v2/repositories/<name>/manifests/<tag>
 | 
					// 	manifestPathSpec: <root>/v2/repositories/<name>/manifests/<tag>
 | 
				
			||||||
// 	layerLinkPathSpec: <root>/v2/repositories/<name>/layers/tarsum/<tarsum version>/<tarsum hash alg>/<tarsum hash>
 | 
					// 	layerLinkPathSpec: <root>/v2/repositories/<name>/layers/tarsum/<tarsum version>/<tarsum hash alg>/<tarsum hash>
 | 
				
			||||||
// 	blobPathSpec: <root>/v2/blob/<algorithm>/<first two hex bytes of digest>/<hex digest>
 | 
					// 	blobPathSpec: <root>/v2/blob/<algorithm>/<first two hex bytes of digest>/<hex digest>
 | 
				
			||||||
 | 
					// 	uploadDataPathSpec: <root>/v2/repositories/<name>/uploads/<uuid>/data
 | 
				
			||||||
 | 
					// 	uploadStartedAtPathSpec: <root>/v2/repositories/<name>/uploads/<uuid>/startedat
 | 
				
			||||||
//
 | 
					//
 | 
				
			||||||
// For more information on the semantic meaning of each path and their
 | 
					// For more information on the semantic meaning of each path and their
 | 
				
			||||||
// contents, please see the path spec documentation.
 | 
					// contents, please see the path spec documentation.
 | 
				
			||||||
| 
						 | 
					@ -103,6 +109,10 @@ func (pm *pathMapper) path(spec pathSpec) (string, error) {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
		blobPathPrefix := append(rootPrefix, "blob")
 | 
							blobPathPrefix := append(rootPrefix, "blob")
 | 
				
			||||||
		return path.Join(append(blobPathPrefix, components...)...), nil
 | 
							return path.Join(append(blobPathPrefix, components...)...), nil
 | 
				
			||||||
 | 
						case uploadDataPathSpec:
 | 
				
			||||||
 | 
							return path.Join(append(repoPrefix, v.name, "uploads", v.uuid, "data")...), nil
 | 
				
			||||||
 | 
						case uploadStartedAtPathSpec:
 | 
				
			||||||
 | 
							return path.Join(append(repoPrefix, v.name, "uploads", v.uuid, "startedat")...), nil
 | 
				
			||||||
	default:
 | 
						default:
 | 
				
			||||||
		// TODO(sday): This is an internal error. Ensure it doesn't escape (panic?).
 | 
							// TODO(sday): This is an internal error. Ensure it doesn't escape (panic?).
 | 
				
			||||||
		return "", fmt.Errorf("unknown path spec: %#v", v)
 | 
							return "", fmt.Errorf("unknown path spec: %#v", v)
 | 
				
			||||||
| 
						 | 
					@ -170,6 +180,29 @@ type blobPathSpec struct {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
func (blobPathSpec) pathSpec() {}
 | 
					func (blobPathSpec) pathSpec() {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					// uploadDataPathSpec defines the path parameters of the data file for
 | 
				
			||||||
 | 
					// uploads.
 | 
				
			||||||
 | 
					type uploadDataPathSpec struct {
 | 
				
			||||||
 | 
						name string
 | 
				
			||||||
 | 
						uuid string
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					func (uploadDataPathSpec) pathSpec() {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					// uploadStartedAtPathSpec defines the path parameters for the file that stores the
 | 
				
			||||||
 | 
					// start time of an upload. If it is missing, the upload is considered
 | 
				
			||||||
 | 
					// unknown. Admittedly, the presence of this file is an ugly hack to make sure
 | 
				
			||||||
 | 
					// we have a way to cleanup old or stalled uploads that doesn't rely on driver
 | 
				
			||||||
 | 
					// FileInfo behavior. If we come up with a more clever way to do this, we
 | 
				
			||||||
 | 
					// should remove this file immediately and rely on the startedAt field from
 | 
				
			||||||
 | 
					// the client to enforce time out policies.
 | 
				
			||||||
 | 
					type uploadStartedAtPathSpec struct {
 | 
				
			||||||
 | 
						name string
 | 
				
			||||||
 | 
						uuid string
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					func (uploadStartedAtPathSpec) pathSpec() {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
// digestPathComoponents provides a consistent path breakdown for a given
 | 
					// digestPathComoponents provides a consistent path breakdown for a given
 | 
				
			||||||
// digest. For a generic digest, it will be as follows:
 | 
					// digest. For a generic digest, it will be as follows:
 | 
				
			||||||
//
 | 
					//
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -43,10 +43,18 @@ func TestPathMapper(t *testing.T) {
 | 
				
			||||||
			expected: "/pathmapper-test/blob/tarsum/v1/sha256/ab/abcdefabcdefabcdef908909909",
 | 
								expected: "/pathmapper-test/blob/tarsum/v1/sha256/ab/abcdefabcdefabcdef908909909",
 | 
				
			||||||
		},
 | 
							},
 | 
				
			||||||
		{
 | 
							{
 | 
				
			||||||
			spec: blobPathSpec{
 | 
								spec: uploadDataPathSpec{
 | 
				
			||||||
				digest: digest.Digest("tarsum+sha256:abcdefabcdefabcdef908909909"),
 | 
									name: "foo/bar",
 | 
				
			||||||
 | 
									uuid: "asdf-asdf-asdf-adsf",
 | 
				
			||||||
			},
 | 
								},
 | 
				
			||||||
			expected: "/pathmapper-test/blob/tarsum/v0/sha256/ab/abcdefabcdefabcdef908909909",
 | 
								expected: "/pathmapper-test/repositories/foo/bar/uploads/asdf-asdf-asdf-adsf/data",
 | 
				
			||||||
 | 
							},
 | 
				
			||||||
 | 
							{
 | 
				
			||||||
 | 
								spec: uploadStartedAtPathSpec{
 | 
				
			||||||
 | 
									name: "foo/bar",
 | 
				
			||||||
 | 
									uuid: "asdf-asdf-asdf-adsf",
 | 
				
			||||||
 | 
								},
 | 
				
			||||||
 | 
								expected: "/pathmapper-test/repositories/foo/bar/uploads/asdf-asdf-asdf-adsf/startedat",
 | 
				
			||||||
		},
 | 
							},
 | 
				
			||||||
	} {
 | 
						} {
 | 
				
			||||||
		p, err := pm.path(testcase.spec)
 | 
							p, err := pm.path(testcase.spec)
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
		Loading…
	
		Reference in New Issue