mirror of https://github.com/gogits/gogs.git
Unknown
12 years ago
3 changed files with 206 additions and 2 deletions
@ -0,0 +1,204 @@
|
||||
// Copyright 2013 gopm authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package doc |
||||
|
||||
import ( |
||||
"archive/tar" |
||||
"bytes" |
||||
"compress/gzip" |
||||
"errors" |
||||
"io" |
||||
"net/http" |
||||
"os" |
||||
"path" |
||||
"regexp" |
||||
"strings" |
||||
) |
||||
|
||||
var (
	// BitbucketPattern matches an import path hosted on bitbucket.org and
	// captures the owner, repository name and optional sub-directory.
	BitbucketPattern = regexp.MustCompile(`^bitbucket\.org/(?P<owner>[a-z0-9A-Z_.\-]+)/(?P<repo>[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]*)?$`)
	// bitbucketEtagRe matches an "hg-" or "git-" prefix on a stored node
	// value, identifying which VCS the repository uses (see GetBitbucketDoc).
	bitbucketEtagRe = regexp.MustCompile(`^(hg|git)-`)
)
||||
|
||||
// GetBitbucketDoc downloads tarball from bitbucket.org.
|
||||
func GetBitbucketDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, cmdFlags map[string]bool) ([]string, error) { |
||||
// Check version control.
|
||||
if m := bitbucketEtagRe.FindStringSubmatch(nod.Value); m != nil { |
||||
match["vcs"] = m[1] |
||||
} else { |
||||
var repo struct { |
||||
Scm string |
||||
} |
||||
if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil { |
||||
return nil, err |
||||
} |
||||
match["vcs"] = repo.Scm |
||||
} |
||||
|
||||
if nod.Type == BRANCH { |
||||
if len(nod.Value) == 0 { |
||||
match["commit"] = defaultTags[match["vcs"]] |
||||
} else { |
||||
match["commit"] = nod.Value |
||||
} |
||||
} |
||||
|
||||
if nod.IsGetDeps { |
||||
if nod.Type == COMMIT { |
||||
tags := make(map[string]string) |
||||
for _, nodeType := range []string{"branches", "tags"} { |
||||
var nodes map[string]struct { |
||||
Node string |
||||
} |
||||
if err := httpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/{0}", match, nodeType), &nodes); err != nil { |
||||
return nil, err |
||||
} |
||||
for t, n := range nodes { |
||||
tags[t] = n.Node |
||||
} |
||||
} |
||||
|
||||
// Check revision tag.
|
||||
var err error |
||||
match["tag"], match["commit"], err = bestTag(tags, defaultTags[match["vcs"]]) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
nod.Value = match["commit"] |
||||
} |
||||
} else { |
||||
// Check downlaod type.
|
||||
switch nod.Type { |
||||
case TAG, COMMIT, BRANCH: |
||||
match["commit"] = nod.Value |
||||
default: |
||||
return nil, errors.New("Unknown node type: " + nod.Type) |
||||
} |
||||
} |
||||
|
||||
// We use .tar.gz here.
|
||||
// zip : https://bitbucket.org/{owner}/{repo}/get/{commit}.zip
|
||||
// tarball : https://bitbucket.org/{owner}/{repo}/get/{commit}.tar.gz
|
||||
|
||||
// Downlaod archive.
|
||||
p, err := HttpGetBytes(client, expand("https://bitbucket.org/{owner}/{repo}/get/{commit}.tar.gz", match), nil) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
suf := "." + nod.Value |
||||
if len(suf) == 1 { |
||||
suf = "" |
||||
} |
||||
|
||||
projectPath := expand("bitbucket.org/{owner}/{repo}", match) |
||||
installPath := installRepoPath + "/" + projectPath + suf |
||||
nod.ImportPath = projectPath |
||||
|
||||
// Remove old files.
|
||||
os.RemoveAll(installPath + "/") |
||||
os.MkdirAll(installPath+"/", os.ModePerm) |
||||
|
||||
gzr, err := gzip.NewReader(bytes.NewReader(p)) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
defer gzr.Close() |
||||
|
||||
tr := tar.NewReader(gzr) |
||||
|
||||
var autoPath string // Auto path is the root path that generated by bitbucket.org.
|
||||
// Get source file data.
|
||||
dirs := make([]string, 0, 5) |
||||
for { |
||||
h, err := tr.Next() |
||||
if err == io.EOF { |
||||
break |
||||
} else if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
fn := h.FileInfo().Name() |
||||
|
||||
// In case that we find directory, usually we should not.
|
||||
if strings.HasSuffix(fn, "/") { |
||||
continue |
||||
} |
||||
|
||||
// Check root path.
|
||||
if len(autoPath) == 0 { |
||||
autoPath = fn[:strings.Index(fn, "/")] |
||||
} |
||||
absPath := strings.Replace(fn, autoPath, installPath, 1) |
||||
|
||||
// Create diretory before create file.
|
||||
dir := path.Dir(absPath) |
||||
if !checkDir(dir, dirs) && !(!cmdFlags["-e"] && strings.Contains(absPath, "example")) { |
||||
dirs = append(dirs, dir) |
||||
os.MkdirAll(dir+"/", os.ModePerm) |
||||
} |
||||
|
||||
if strings.HasPrefix(fn, ".") { |
||||
continue |
||||
} |
||||
|
||||
// Get data from archive.
|
||||
fbytes := make([]byte, h.Size) |
||||
if _, err := io.ReadFull(tr, fbytes); err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
// Write data to file
|
||||
fw, err := os.Create(absPath) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
_, err = fw.Write(fbytes) |
||||
fw.Close() |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
|
||||
// Set modify time.
|
||||
os.Chtimes(absPath, h.AccessTime, h.ModTime) |
||||
} |
||||
|
||||
var imports []string |
||||
|
||||
// Check if need to check imports.
|
||||
if nod.IsGetDeps { |
||||
for _, d := range dirs { |
||||
importPkgs, err := CheckImports(d+"/", match["importPath"]) |
||||
if err != nil { |
||||
return nil, err |
||||
} |
||||
imports = append(imports, importPkgs...) |
||||
} |
||||
} |
||||
|
||||
return imports, err |
||||
} |
||||
|
||||
// checkDir reports whether dir is already present in dirs, i.e. whether
// the directory has been recorded (and created) earlier in the extraction.
func checkDir(dir string, dirs []string) bool {
	for i := range dirs {
		if dirs[i] == dir {
			return true
		}
	}
	return false
}
Loading…
Reference in new issue