diff --git a/.gitignore b/.gitignore index f78fbe263..2b4fd370c 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ gopm *.exe *.exe~ +repos/ \ No newline at end of file diff --git a/cmd/cmd.go b/cmd/cmd.go index bf4176e04..19c96bb59 100644 --- a/cmd/cmd.go +++ b/cmd/cmd.go @@ -21,6 +21,7 @@ import ( ) var ( + AppPath string reposDir string = "~/.gopm/repos" ) diff --git a/cmd/get.go b/cmd/get.go index 2c710f7dd..6a502cbfc 100644 --- a/cmd/get.go +++ b/cmd/get.go @@ -15,22 +15,19 @@ package cmd import ( - "archive/zip" "errors" "fmt" - "io" - "net/http" - "os" "os/user" - "path" - "path/filepath" "strings" - "../doc" + "github.com/gpmgo/gopm/doc" ) var ( - installGOPATH string // The GOPATH that packages are downloaded to. + installRepoPath string + downloadCache map[string]bool // Saves packages that have been downloaded. + downloadCount int + failConut int ) var CmdGet = &Command{ @@ -68,10 +65,6 @@ func init() { } } -func isStandalone() bool { - return true -} - // printGetPrompt prints prompt information to users to // let them know what's going on. func printGetPrompt(flag string) { @@ -127,200 +120,136 @@ func runGet(cmd *Command, args []string) { return } - if len(args) > 0 { - var ver string = TRUNK - if len(args) == 2 { - ver = args[1] - } - pkg := NewPkg(args[0], ver) - if pkg == nil { - doc.ColorLog("[ERROR] Unrecognized package %v.\n", args[0]) - return - } - - if isStandalone() { - err := getDirect(pkg) - if err != nil { - doc.ColorLog("[ERROR] %v\n", err) - } else { - fmt.Println("done.") - } - } else { - fmt.Println("Not implemented.") - //getSource(pkgName) - } - } -} - -func dirExists(dir string) bool { - d, e := os.Stat(dir) - switch { - case e != nil: - return false - case !d.IsDir(): - return false - } - - return true -} - -func fileExists(dir string) bool { - info, err := os.Stat(dir) + curUser, err := user.Current() if err != nil { - return false + doc.ColorLog("[ERROR] Fail to get current user[ %s ]\n", err) + return } - return !info.IsDir() -} + installRepoPath = strings.Replace(reposDir, "~", curUser.HomeDir, -1) + doc.ColorLog("[INFO] Packages will be installed into( %s )\n", installRepoPath) -func joinPath(paths ...string) string { - if len(paths) < 1 { - return "" - } - res := "" - for _, p := range paths { - res = path.Join(res, p) - } - return res -} - -func download(url string, localfile string) error { - fmt.Println("Downloading", url, "...") - resp, err := http.Get(url) - if err != nil { - return err - } - defer resp.Body.Close() + nodes := []*doc.Node{} + // ver describles branch, tag or commit. + var t, ver string = doc.BRANCH, "" - localdir := filepath.Dir(localfile) - if !dirExists(localdir) { - err = os.MkdirAll(localdir, 0777) + if len(args) >= 2 { + t, ver, err = validPath(args[1]) if err != nil { - return err + doc.ColorLog("[ERROR] Fail to parse 'args'[ %s ]\n", err) + return } } - if !fileExists(localfile) { - f, err := os.Create(localfile) - if err == nil { - _, err = io.Copy(f, resp.Body) - } - if err != nil { - return err - } - } + nodes = append(nodes, &doc.Node{ + ImportPath: args[0], + DownloadURL: args[0], + Type: t, + Value: ver, + IsGetDeps: true, + }) - return nil -} + // Download package(s). 
+ downloadPackages(nodes) -func extractPkg(pkg *Pkg, localfile string, update bool) error { - fmt.Println("Extracting package", pkg.Name, "...") + doc.ColorLog("[INFO] %d package(s) downloaded, %d failed.\n", + downloadCount, failConut) +} - gopath := os.Getenv("GOPATH") - var childDirs []string = strings.Split(pkg.Name, "/") +// downloadPackages downloads packages with certain commit, +// if the commit is empty string, then it downloads all dependencies, +// otherwise, it only downloada package with specific commit only. +func downloadPackages(nodes []*doc.Node) { + // Check all packages, they may be raw packages path. + for _, n := range nodes { + // Check if it is a valid remote path. + if doc.IsValidRemotePath(n.ImportPath) { + if !CmdGet.Flags["-u"] { + // Check if package has been downloaded. + installPath := installRepoPath + "/" + doc.GetProjectPath(n.ImportPath) + if len(n.Value) > 0 { + installPath += "." + n.Value + } + if doc.IsExist(installPath) { + doc.ColorLog("[WARN] Skipped installed package( %s => %s:%s )\n", + n.ImportPath, n.Type, n.Value) + continue + } + } - if pkg.Ver != TRUNK { - childDirs[len(childDirs)-1] = fmt.Sprintf("%v_%v_%v", childDirs[len(childDirs)-1], pkg.Ver, pkg.VerId) - } - dstDir := joinPath(gopath, "src", joinPath(childDirs...)) - //fmt.Println(dstDir) - var err error - if !update { - if dirExists(dstDir) { - return nil - } - err = os.MkdirAll(dstDir, 0777) - } else { - if dirExists(dstDir) { - err = os.Remove(dstDir) + if !downloadCache[n.ImportPath] { + // Download package. + nod, imports := downloadPackage(n) + if len(imports) > 0 { + // Need to download dependencies. + // Generate temporary nodes. + nodes := make([]*doc.Node, len(imports)) + for i := range nodes { + nodes[i] = &doc.Node{ + ImportPath: imports[i], + DownloadURL: imports[i], + Type: doc.BRANCH, + IsGetDeps: true, + } + } + downloadPackages(nodes) + } + + // Only save package information with specific commit. + if nod != nil { + // Save record in local nodes. + doc.ColorLog("[SUCC] Downloaded package( %s => %s:%s )\n", + n.ImportPath, n.Type, n.Value) + downloadCount++ + //saveNode(nod) + } + } else { + doc.ColorLog("[WARN] Skipped downloaded package( %s => %s:%s )\n", + n.ImportPath, n.Type, n.Value) + } } else { - err = os.MkdirAll(dstDir, 0777) + // Invalid import path. + doc.ColorLog("[WARN] Skipped invalid package path( %s => %s:%s )\n", + n.ImportPath, n.Type, n.Value) + failConut++ } } +} - if err != nil { - return err - } +// downloadPackage downloads package either use version control tools or not. +func downloadPackage(nod *doc.Node) (*doc.Node, []string) { + doc.ColorLog("[TRAC] Downloading package( %s => %s:%s )\n", + nod.ImportPath, nod.Type, nod.Value) + // Mark as donwloaded. 
+ downloadCache[nod.ImportPath] = true - if path.Ext(localfile) != ".zip" { - return errors.New("Not implemented!") - } + imports, err := doc.PureDownload(nod, installRepoPath, CmdGet.Flags) - r, err := zip.OpenReader(localfile) if err != nil { - return err - } - defer r.Close() - - for _, f := range r.File { - fmt.Printf("Contents of %s:\n", f.Name) - if f.FileInfo().IsDir() { - continue - } - - paths := strings.Split(f.Name, "/")[1:] - //fmt.Println(paths) - if len(paths) < 1 { - continue - } - - if len(paths) > 1 { - childDir := joinPath(dstDir, joinPath(paths[0:len(paths)-1]...)) - //fmt.Println("creating", childDir) - err = os.MkdirAll(childDir, 0777) - if err != nil { - return err - } - } - - rc, err := f.Open() - if err != nil { - return err - } - - newF, err := os.Create(path.Join(dstDir, joinPath(paths...))) - if err == nil { - _, err = io.Copy(newF, rc) - } - if err != nil { - return err - } - rc.Close() + doc.ColorLog("[ERRO] Download falied[ %s ]\n", err) + failConut++ + return nil, nil } - return nil + return nod, imports } -func getPackage(pkg *Pkg, url string) error { - curUser, err := user.Current() - if err != nil { - return err - } - - reposDir = strings.Replace(reposDir, "~", curUser.HomeDir, -1) - localdir := path.Join(reposDir, pkg.Name) - localdir, err = filepath.Abs(localdir) - if err != nil { - return err - } - - localfile := path.Join(localdir, pkg.FileName()) +// validPath checks if the information of the package is valid. +func validPath(info string) (string, string, error) { + infos := strings.Split(info, ":") - err = download(url, localfile) - if err != nil { - return err + l := len(infos) + switch { + case l > 2: + return "", "", errors.New("Invalid information of package") + case l == 1: + return doc.BRANCH, "", nil + case l == 2: + switch infos[1] { + case doc.TRUNK, doc.MASTER, doc.DEFAULT: + infos[1] = "" + } + return infos[0], infos[1], nil + default: + return "", "", errors.New("Cannot match any case") } - - return extractPkg(pkg, localfile, false) } - -func getDirect(pkg *Pkg) error { - return getPackage(pkg, pkg.Url()) -} - -/*func getFromSource(pkgName string, ver string, source string) error { - urlTempl := "https://%v/%v" - //urlTempl := "https://%v/archive/master.zip" - url := fmt.Sprintf(urlTempl, source, pkgName) - - return getPackage(pkgName, ver, url) -}*/ diff --git a/cmd/search.go b/cmd/search.go index 917a4b04c..afbcc694b 100644 --- a/cmd/search.go +++ b/cmd/search.go @@ -15,11 +15,12 @@ package cmd import ( - "../doc" "encoding/json" "fmt" "io/ioutil" "net/http" + + "github.com/gpmgo/gopm/doc" ) var CmdSearch = &Command{ diff --git a/cmd/serve.go b/cmd/serve.go index cb7cb53a9..c45f93db7 100644 --- a/cmd/serve.go +++ b/cmd/serve.go @@ -1,7 +1,20 @@ +// Copyright 2013 gopm authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ package cmd import ( - "../doc" "fmt" "github.com/syndtr/goleveldb/leveldb" "github.com/syndtr/goleveldb/leveldb/errors" @@ -12,6 +25,8 @@ import ( "os/user" "strconv" "strings" + + "github.com/gpmgo/gopm/doc" ) var ( @@ -129,7 +144,7 @@ func batchPut(batch *leveldb.Batch, key string, value string) error { return nil } -func addPkg(pkg *Pkg) error { +func addNode(nod *doc.Node) error { batch := new(leveldb.Batch) strLastId, err := dbGet("lastId") if err != nil { @@ -151,19 +166,19 @@ func addPkg(pkg *Pkg) error { return err } - pkgKey := fmt.Sprintf("index:%v", pkg.Name) + nodKey := fmt.Sprintf("index:%v", nod.ImportPath) - id, err := dbGet(pkgKey) + id, err := dbGet(nodKey) if err != nil { if err == errors.ErrNotFound { id = fmt.Sprintf("%v", lastId+1) fmt.Println(id) err = batchPut(batch, "lastId", id) if err == nil { - err = batchPut(batch, pkgKey, id) + err = batchPut(batch, nodKey, id) } if err == nil { - err = batchPut(batch, "pkg:"+id, pkg.Name) + err = batchPut(batch, "pkg:"+id, nod.ImportPath) } total, err := dbGet("total") if err != nil { @@ -202,11 +217,11 @@ func addPkg(pkg *Pkg) error { } if vers == "" { - fmt.Println(pkg) - vers = pkg.VerString() + fmt.Println(nod) + vers = nod.VerString() } else { - if !strings.Contains(vers, pkg.VerString()) { - vers = vers + "," + pkg.VerString() + if !strings.Contains(vers, nod.VerString()) { + vers = vers + "," + nod.VerString() } else { return nil } @@ -221,7 +236,7 @@ func addPkg(pkg *Pkg) error { return nil } - keys := splitPkgName(pkg.Name) + keys := splitPkgName(nod.ImportPath) for key, _ := range keys { err = batchPut(batch, fmt.Sprintf("key:%v:%v", key, id), "") @@ -233,7 +248,7 @@ func addPkg(pkg *Pkg) error { return db.Write(batch, wo) } -func rmPkg(pkg *Pkg) { +func rmPkg(nod *doc.Node) { } @@ -391,9 +406,14 @@ func addHandler(w http.ResponseWriter, r *http.Request) { r.ParseForm() for key, _ := range r.Form { fmt.Println(key) - pkg := NewPkg(key, "") - if pkg != nil { - err := addPkg(pkg) + // pkg := NewPkg(key, "") + nod := &doc.Node{ + ImportPath: key, + DownloadURL: key, + IsGetDeps: true, + } + if nod != nil { + err := addNode(nod) if err != nil { fmt.Println(err) } diff --git a/cmd/service.go b/cmd/service.go index f50630cc0..11f0bf19e 100644 --- a/cmd/service.go +++ b/cmd/service.go @@ -14,232 +14,200 @@ package cmd -import ( - //"errors" - "fmt" - "strings" -) - -const ( - TRUNK = "trunk" - TAG = "tag" - BRANCH = "branch" - COMMIT = "commit" -) - -var ( - downloadCache map[string]bool // Saves packages that have been downloaded. 
- services []Service = []Service{ - &GithubService{}, - &GitOscService{}, - &BitBucketService{}, - //&GitCafeService{}, - //&CodeCSDNSource{}, - } -) - -func getService(pkgName string) Service { - for _, service := range services { - if service.HasPkg(pkgName) { - return service - } - } - return nil -} - -type Service interface { - PkgUrl(pkg *Pkg) string - HasPkg(pkgName string) bool - PkgExt() string -} - -type Pkg struct { - Service Service - Name string - Ver string - VerId string -} - -func (p *Pkg) VerSimpleString() string { - if p.VerId != "" { - return p.VerId - } - return p.Ver -} - -func (p *Pkg) VerString() string { - if p.VerId == "" { - return p.Ver - } - return fmt.Sprintf("%v:%v", p.Ver, p.VerId) -} - -func (p *Pkg) Url() string { - return p.Service.PkgUrl(p) -} - -func (p *Pkg) FileName() string { - return fmt.Sprintf("%v.%v", p.VerSimpleString(), p.Service.PkgExt()) -} - -func NewPkg(pkgName string, ver string) *Pkg { - vers := strings.Split(ver, ":") - if len(vers) > 2 { - return nil - } - - var verId string - if len(vers) == 2 { - verId = vers[1] - } - - if len(vers) == 1 { - vers[0] = TRUNK - } - - service := getService(pkgName) - if service == nil { - return nil - } - - return &Pkg{service, pkgName, vers[0], verId} -} - -// github repository -type GithubService struct { -} - -func (s *GithubService) PkgUrl(pkg *Pkg) string { - var verPath string - if pkg.Ver == TRUNK { - verPath = "master" - } else { - verPath = pkg.VerId - } - return fmt.Sprintf("https://%v/archive/%v.zip", pkg.Name, verPath) -} - -func (s *GithubService) HasPkg(pkgName string) bool { - return strings.HasPrefix(pkgName, "github.com") -} - -func (s *GithubService) PkgExt() string { - return "zip" -} - -// git osc repos -type GitOscService struct { -} - -func (s *GitOscService) PkgUrl(pkg *Pkg) string { - var verPath string - if pkg.Ver == TRUNK { - verPath = "master" - } else { - verPath = pkg.VerId - } - return fmt.Sprintf("https://%v/repository/archive?ref=%v", pkg.Name, verPath) -} - -func (s *GitOscService) HasPkg(pkgName string) bool { - return strings.HasPrefix(pkgName, "git.oschina.net") -} - -func (s *GitOscService) PkgExt() string { - return "zip" -} - -// bitbucket.org -type BitBucketService struct { -} - -func (s *BitBucketService) PkgUrl(pkg *Pkg) string { - var verPath string - if pkg.Ver == TRUNK { - verPath = "default" - } else { - verPath = pkg.VerId - } - - return fmt.Sprintf("https://%v/get/%v.zip", pkg.Name, verPath) -} - -func (s *BitBucketService) HasPkg(pkgName string) bool { - return strings.HasPrefix(pkgName, "bitbucket.org") -} - -func (s *BitBucketService) PkgExt() string { - return "zip" -} - -type GitCafeService struct { -} - -func (s *GitCafeService) PkgUrl(pkg *Pkg) string { - var verPath string - if pkg.Ver == TRUNK { - verPath = "master" - } else { - verPath = pkg.VerId - } - - return fmt.Sprintf("https://%v/tarball/%v", pkg.Name, verPath) -} - -func (s *GitCafeService) HasPkg(pkgName string) bool { - return strings.HasPrefix(pkgName, "gitcafe.com") -} - -func (s *GitCafeService) PkgExt() string { - return "tar.gz" -} - -// git lab repos, not completed -type GitLabService struct { - DomainOrIp string - Username string - Passwd string - PrivateKey string -} - -func (s *GitLabService) PkgUrl(pkg *Pkg) string { - var verPath string - if pkg.Ver == TRUNK { - verPath = "master" - } else { - verPath = pkg.VerId - } - - return fmt.Sprintf("https://%v/repository/archive/%v", pkg.Name, verPath) -} - -func (s *GitLabService) HasPkg(pkgName string) bool { - return 
strings.HasPrefix(pkgName, s.DomainOrIp) -} - -func (s *GitLabService) PkgExt() string { - return "tar.gz" -} - -// code.csdn.net -type CodeCSDNService struct { -} - -func (s *CodeCSDNService) PkgUrl(pkg *Pkg) string { - var verPath string - if pkg.Ver == TRUNK { - verPath = "master" - } else { - verPath = pkg.VerId - } - - return fmt.Sprintf("https://%v/repository/archive?ref=%v", pkg.Name, verPath) -} - -func (s *CodeCSDNService) HasPkg(pkgName string) bool { - return strings.HasPrefix(pkgName, "code.csdn.net") -} - -func (s *CodeCSDNService) PkgExt() string { - return "zip" -} +// func getService(pkgName string) Service { +// for _, service := range services { +// if service.HasPkg(pkgName) { +// return service +// } +// } +// return nil +// } + +// type Service interface { +// PkgUrl(pkg *Pkg) string +// HasPkg(pkgName string) bool +// PkgExt() string +// } + +// type Pkg struct { +// Name string +// Ver string +// VerId string +// } + +// func NewPkg(pkgName string, ver string) *Pkg { +// vers := strings.Split(ver, ":") +// if len(vers) > 2 { +// return nil +// } + +// var verId string +// if len(vers) == 2 { +// verId = vers[1] +// } + +// if len(vers) == 1 { +// vers[0] = TRUNK +// } + +// service := getService(pkgName) +// if service == nil { +// return nil +// } + +// return &Pkg{service, pkgName, vers[0], verId} +// } + +// func (p *Pkg) VerSimpleString() string { +// if p.VerId != "" { +// return p.VerId +// } +// return p.Ver +// } + +// func (p *Pkg) Url() string { +// return p.Service.PkgUrl(p) +// } + +// func (p *Pkg) FileName() string { +// return fmt.Sprintf("%v.%v", p.VerSimpleString(), p.Service.PkgExt()) +// } + +// // github repository +// type GithubService struct { +// } + +// func (s *GithubService) PkgUrl(pkg *Pkg) string { +// var verPath string +// if pkg.Ver == TRUNK { +// verPath = "master" +// } else { +// verPath = pkg.VerId +// } +// return fmt.Sprintf("https://%v/archive/%v.zip", pkg.Name, verPath) +// } + +// func (s *GithubService) HasPkg(pkgName string) bool { +// return strings.HasPrefix(pkgName, "github.com") +// } + +// func (s *GithubService) PkgExt() string { +// return "zip" +// } + +// // git osc repos +// type GitOscService struct { +// } + +// func (s *GitOscService) PkgUrl(pkg *Pkg) string { +// var verPath string +// if pkg.Ver == TRUNK { +// verPath = "master" +// } else { +// verPath = pkg.VerId +// } +// return fmt.Sprintf("https://%v/repository/archive?ref=%v", pkg.Name, verPath) +// } + +// func (s *GitOscService) HasPkg(pkgName string) bool { +// return strings.HasPrefix(pkgName, "git.oschina.net") +// } + +// func (s *GitOscService) PkgExt() string { +// return "zip" +// } + +// // bitbucket.org +// type BitBucketService struct { +// } + +// func (s *BitBucketService) PkgUrl(pkg *Pkg) string { +// var verPath string +// if pkg.Ver == TRUNK { +// verPath = "default" +// } else { +// verPath = pkg.VerId +// } + +// return fmt.Sprintf("https://%v/get/%v.zip", pkg.Name, verPath) +// } + +// func (s *BitBucketService) HasPkg(pkgName string) bool { +// return strings.HasPrefix(pkgName, "bitbucket.org") +// } + +// func (s *BitBucketService) PkgExt() string { +// return "zip" +// } + +// type GitCafeService struct { +// } + +// func (s *GitCafeService) PkgUrl(pkg *Pkg) string { +// var verPath string +// if pkg.Ver == TRUNK { +// verPath = "master" +// } else { +// verPath = pkg.VerId +// } + +// return fmt.Sprintf("https://%v/tarball/%v", pkg.Name, verPath) +// } + +// func (s *GitCafeService) HasPkg(pkgName string) bool { +// return 
strings.HasPrefix(pkgName, "gitcafe.com") +// } + +// func (s *GitCafeService) PkgExt() string { +// return "tar.gz" +// } + +// // git lab repos, not completed +// type GitLabService struct { +// DomainOrIp string +// Username string +// Passwd string +// PrivateKey string +// } + +// func (s *GitLabService) PkgUrl(pkg *Pkg) string { +// var verPath string +// if pkg.Ver == TRUNK { +// verPath = "master" +// } else { +// verPath = pkg.VerId +// } + +// return fmt.Sprintf("https://%v/repository/archive/%v", pkg.Name, verPath) +// } + +// func (s *GitLabService) HasPkg(pkgName string) bool { +// return strings.HasPrefix(pkgName, s.DomainOrIp) +// } + +// func (s *GitLabService) PkgExt() string { +// return "tar.gz" +// } + +// // code.csdn.net +// type CodeCSDNService struct { +// } + +// func (s *CodeCSDNService) PkgUrl(pkg *Pkg) string { +// var verPath string +// if pkg.Ver == TRUNK { +// verPath = "master" +// } else { +// verPath = pkg.VerId +// } + +// return fmt.Sprintf("https://%v/repository/archive?ref=%v", pkg.Name, verPath) +// } + +// func (s *CodeCSDNService) HasPkg(pkgName string) bool { +// return strings.HasPrefix(pkgName, "code.csdn.net") +// } + +// func (s *CodeCSDNService) PkgExt() string { +// return "zip" +// } diff --git a/doc/bitbucket.go b/doc/bitbucket.go new file mode 100644 index 000000000..5b0e537a0 --- /dev/null +++ b/doc/bitbucket.go @@ -0,0 +1,210 @@ +// Copyright 2013 gopm authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package doc + +import ( + "archive/tar" + "bytes" + "compress/gzip" + "errors" + "io" + "net/http" + "os" + "path" + "regexp" + "strings" + + "github.com/Unknwon/com" +) + +var ( + bitbucketPattern = regexp.MustCompile(`^bitbucket\.org/(?P[a-z0-9A-Z_.\-]+)/(?P[a-z0-9A-Z_.\-]+)(?P/[a-z0-9A-Z_.\-/]*)?$`) + bitbucketEtagRe = regexp.MustCompile(`^(hg|git)-`) +) + +// getBitbucketDoc downloads tarball from bitbucket.org. +func getBitbucketDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, cmdFlags map[string]bool) ([]string, error) { + // Check version control. + if m := bitbucketEtagRe.FindStringSubmatch(nod.Value); m != nil { + match["vcs"] = m[1] + } else { + var repo struct { + Scm string + } + if err := com.HttpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil { + return nil, err + } + match["vcs"] = repo.Scm + } + + if nod.Type == BRANCH { + if len(nod.Value) == 0 { + match["commit"] = defaultTags[match["vcs"]] + } else { + match["commit"] = nod.Value + } + } + + if nod.IsGetDeps { + if nod.Type == COMMIT { + tags := make(map[string]string) + for _, nodeType := range []string{"branches", "tags"} { + var nodes map[string]struct { + Node string + } + if err := com.HttpGetJSON(client, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/{0}", match, nodeType), &nodes); err != nil { + return nil, err + } + for t, n := range nodes { + tags[t] = n.Node + } + } + + // Check revision tag. 
+ var err error + match["tag"], match["commit"], err = bestTag(tags, defaultTags[match["vcs"]]) + if err != nil { + return nil, err + } + + nod.Value = match["commit"] + } + } else { + // Check downlaod type. + switch nod.Type { + case TAG, COMMIT, BRANCH: + match["commit"] = nod.Value + default: + return nil, errors.New("Unknown node type: " + nod.Type) + } + } + + // We use .tar.gz here. + // zip : https://bitbucket.org/{owner}/{repo}/get/{commit}.zip + // tarball : https://bitbucket.org/{owner}/{repo}/get/{commit}.tar.gz + + // Downlaod archive. + p, err := com.HttpGetBytes(client, expand("https://bitbucket.org/{owner}/{repo}/get/{commit}.tar.gz", match), nil) + if err != nil { + return nil, err + } + + var installPath string + if nod.ImportPath == nod.DownloadURL { + suf := "." + nod.Value + if len(suf) == 1 { + suf = "" + } + projectPath := expand("bitbucket.org/{owner}/{repo}", match) + installPath = installRepoPath + "/" + projectPath + suf + nod.ImportPath = projectPath + } else { + installPath = installRepoPath + "/" + nod.ImportPath + } + + // Remove old files. + os.RemoveAll(installPath + "/") + os.MkdirAll(installPath+"/", os.ModePerm) + + gzr, err := gzip.NewReader(bytes.NewReader(p)) + if err != nil { + return nil, err + } + defer gzr.Close() + + tr := tar.NewReader(gzr) + + var autoPath string // Auto path is the root path that generated by bitbucket.org. + // Get source file data. + dirs := make([]string, 0, 5) + for { + h, err := tr.Next() + if err == io.EOF { + break + } else if err != nil { + return nil, err + } + + fn := h.FileInfo().Name() + + // In case that we find directory, usually we should not. + if strings.HasSuffix(fn, "/") { + continue + } + + // Check root path. + if len(autoPath) == 0 { + autoPath = fn[:strings.Index(fn, "/")] + } + absPath := strings.Replace(fn, autoPath, installPath, 1) + + // Create diretory before create file. + dir := path.Dir(absPath) + if !checkDir(dir, dirs) && !(!cmdFlags["-e"] && strings.Contains(absPath, "example")) { + dirs = append(dirs, dir) + os.MkdirAll(dir+"/", os.ModePerm) + } + + if strings.HasPrefix(fn, ".") { + continue + } + + // Get data from archive. + fbytes := make([]byte, h.Size) + if _, err := io.ReadFull(tr, fbytes); err != nil { + return nil, err + } + + // Write data to file + fw, err := os.Create(absPath) + if err != nil { + return nil, err + } + + _, err = fw.Write(fbytes) + fw.Close() + if err != nil { + return nil, err + } + + // Set modify time. + os.Chtimes(absPath, h.AccessTime, h.ModTime) + } + + var imports []string + + // Check if need to check imports. + if nod.IsGetDeps { + for _, d := range dirs { + importPkgs, err := CheckImports(d+"/", match["importPath"]) + if err != nil { + return nil, err + } + imports = append(imports, importPkgs...) + } + } + + return imports, err +} + +// checkDir checks if current directory has been saved. +func checkDir(dir string, dirs []string) bool { + for _, d := range dirs { + if dir == d { + return true + } + } + return false +} diff --git a/doc/error.go b/doc/error.go new file mode 100644 index 000000000..56890c961 --- /dev/null +++ b/doc/error.go @@ -0,0 +1,25 @@ +// Copyright 2013 gopm authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package doc + +import ( + "errors" +) + +var ( + errNotModified = errors.New("Package not modified") + errNoMatch = errors.New("no match") + errUpdateTimeout = errors.New("update timeout") +) diff --git a/doc/github.go b/doc/github.go new file mode 100644 index 000000000..7ab20ea8a --- /dev/null +++ b/doc/github.go @@ -0,0 +1,174 @@ +// Copyright 2013 gopm authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package doc + +import ( + "archive/zip" + "bytes" + "errors" + "io" + "net/http" + "os" + "path" + "regexp" + "strings" + + "github.com/Unknwon/com" +) + +var ( + githubRawHeader = http.Header{"Accept": {"application/vnd.github-blob.raw"}} + githubPattern = regexp.MustCompile(`^github\.com/(?P[a-z0-9A-Z_.\-]+)/(?P[a-z0-9A-Z_.\-]+)(?P/[a-z0-9A-Z_.\-/]*)?$`) + githubCred string +) + +/*func SetGithubCredentials(id, secret string) { + //githubCred = "client_id=" + id + "&client_secret=" + secret +}*/ + +func SetGithubCredentials(token string) { + if len(token) > 0 { + githubCred = "access_token=" + token + } +} + +// getGithubDoc downloads tarball from github.com. +func getGithubDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, cmdFlags map[string]bool) ([]string, error) { + match["cred"] = githubCred + + // Check downlaod type. + switch nod.Type { + case BRANCH: + if len(nod.Value) == 0 { + match["sha"] = MASTER + } else { + match["sha"] = nod.Value + } + case TAG, COMMIT: + match["sha"] = nod.Value + default: + return nil, errors.New("Unknown node type: " + nod.Type) + } + + // We use .zip here. + // zip: https://github.com/{owner}/{repo}/archive/{sha}.zip + // tarball: https://github.com/{owner}/{repo}/tarball/{sha} + + // Downlaod archive. + p, err := com.HttpGetBytes(client, expand("https://github.com/{owner}/{repo}/archive/{sha}.zip", match), nil) + if err != nil { + return nil, errors.New("Fail to donwload Github repo -> " + err.Error()) + } + + shaName := expand("{repo}-{sha}", match) + if nod.Type == "tag" { + shaName = strings.Replace(shaName, "-v", "-", 1) + } + + var installPath string + if nod.ImportPath == nod.DownloadURL { + suf := "." + nod.Value + if len(suf) == 1 { + suf = "" + } + projectPath := expand("github.com/{owner}/{repo}", match) + installPath = installRepoPath + "/" + projectPath + suf + nod.ImportPath = projectPath + } else { + installPath = installRepoPath + "/" + nod.ImportPath + } + + // Remove old files. 
+ os.RemoveAll(installPath + "/") + os.MkdirAll(installPath+"/", os.ModePerm) + + r, err := zip.NewReader(bytes.NewReader(p), int64(len(p))) + if err != nil { + return nil, err + } + + dirs := make([]string, 0, 5) + // Need to add root path because we cannot get from tarball. + dirs = append(dirs, installPath+"/") + for _, f := range r.File { + absPath := strings.Replace(f.FileInfo().Name(), shaName, installPath, 1) + // Create diretory before create file. + os.MkdirAll(path.Dir(absPath)+"/", os.ModePerm) + + compareDir: + switch { + case strings.HasSuffix(absPath, "/"): // Directory. + // Check if current directory is example. + if !(!cmdFlags["-e"] && strings.Contains(absPath, "example")) { + for _, d := range dirs { + if d == absPath { + break compareDir + } + } + dirs = append(dirs, absPath) + } + case !strings.HasPrefix(f.FileInfo().Name(), "."): + // Get file from archive. + rc, err := f.Open() + if err != nil { + return nil, err + } + + // Write data to file + fw, _ := os.Create(absPath) + if err != nil { + return nil, err + } + + _, err = io.Copy(fw, rc) + // Close files. + rc.Close() + fw.Close() + if err != nil { + return nil, err + } + + // Set modify time. + os.Chtimes(absPath, f.ModTime(), f.ModTime()) + } + } + + var imports []string + + // Check if need to check imports. + if nod.IsGetDeps { + for _, d := range dirs { + importPkgs, err := CheckImports(d, match["importPath"]) + if err != nil { + return nil, err + } + imports = append(imports, importPkgs...) + } + } + + /*fpath := appPath + "repo/tarballs/" + node.ImportPath + "-" + node.Value + ".zip" + // Save tarball. + if autoBackup && !utils.IsExist(fpath) { + os.MkdirAll(path.Dir(fpath)+"/", os.ModePerm) + f, err := os.Create(fpath) + if err != nil { + return nil, err + } + defer f.Close() + _, err = f.Write(p) + }*/ + + return imports, err +} diff --git a/doc/google.go b/doc/google.go new file mode 100644 index 000000000..d1e2c48f2 --- /dev/null +++ b/doc/google.go @@ -0,0 +1,86 @@ +// Copyright 2013 gopm authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package doc + +import ( + "net/http" + "os" + "path" + "regexp" + + "github.com/Unknwon/com" + "github.com/Unknwon/ctw/packer" +) + +var ( + googlePattern = regexp.MustCompile(`^code\.google\.com/p/(?P[a-z0-9\-]+)(:?\.(?P[a-z0-9\-]+))?(?P/[a-z0-9A-Z_.\-/]+)?$`) +) + +// getGoogleDoc downloads raw files from code.google.com. +func getGoogleDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, cmdFlags map[string]bool) ([]string, error) { + packer.SetupGoogleMatch(match) + // Check version control. + if err := packer.GetGoogleVCS(client, match); err != nil { + return nil, err + } + + var installPath string + if nod.ImportPath == nod.DownloadURL { + suf := "." 
+ nod.Value + if len(suf) == 1 { + suf = "" + } + projectPath := expand("code.google.com/p/{repo}{dot}{subrepo}{dir}", match) + installPath = installRepoPath + "/" + projectPath + suf + nod.ImportPath = projectPath + } else { + installPath = installRepoPath + "/" + nod.ImportPath + } + + // Remove old files. + os.RemoveAll(installPath + "/") + match["tag"] = nod.Value + + ext := ".zip" + if match["vcs"] == "svn" { + ext = ".tar.gz" + } + + err := packer.PackToFile(match["importPath"], installPath+ext, match) + if err != nil { + return nil, err + } + + var dirs []string + if match["vcs"] != "svn" { + dirs, err = com.Unzip(installPath+ext, path.Dir(installPath)) + } else { + dirs, err = com.UnTarGz(installPath+ext, path.Dir(installPath)) + } + + if err != nil { + return nil, err + } + os.Remove(installPath + ext) + os.Rename(path.Dir(installPath)+"/"+dirs[0], installPath) + + // Check if need to check imports. + if nod.IsGetDeps { + imports := getImports(installPath+"/", match, cmdFlags) + return imports, err + } + + return nil, err +} diff --git a/doc/http.go b/doc/http.go new file mode 100644 index 000000000..e5f34e05b --- /dev/null +++ b/doc/http.go @@ -0,0 +1,52 @@ +// Copyright 2013 gopm authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package doc + +import ( + "flag" + "net" + "net/http" + "time" + + "github.com/Unknwon/com" +) + +var ( + dialTimeout = flag.Duration("dial_timeout", 10*time.Second, "Timeout for dialing an HTTP connection.") + requestTimeout = flag.Duration("request_timeout", 20*time.Second, "Time out for roundtripping an HTTP request.") +) + +func timeoutDial(network, addr string) (net.Conn, error) { + return net.DialTimeout(network, addr, *dialTimeout) +} + +type transport struct { + t http.Transport +} + +func (t *transport) RoundTrip(req *http.Request) (*http.Response, error) { + timer := time.AfterFunc(*requestTimeout, func() { + t.t.CancelRequest(req) + com.ColorLog("[WARN] Canceled request for %s, please interrupt the program.\n", req.URL) + }) + defer timer.Stop() + resp, err := t.t.RoundTrip(req) + return resp, err +} + +var ( + httpTransport = &transport{t: http.Transport{Dial: timeoutDial, ResponseHeaderTimeout: *requestTimeout / 2}} + HttpClient = &http.Client{Transport: httpTransport} +) diff --git a/doc/launchpad.go b/doc/launchpad.go new file mode 100644 index 000000000..b7ac63c8d --- /dev/null +++ b/doc/launchpad.go @@ -0,0 +1,140 @@ +// Copyright 2013 gopm authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. 
+ +package doc + +import ( + "archive/tar" + "bytes" + "compress/gzip" + "io" + "net/http" + "os" + "regexp" + "strings" + + "github.com/Unknwon/com" +) + +var launchpadPattern = regexp.MustCompile(`^launchpad\.net/(?P(?P[a-z0-9A-Z_.\-]+)(?P/[a-z0-9A-Z_.\-]+)?|~[a-z0-9A-Z_.\-]+/(\+junk|[a-z0-9A-Z_.\-]+)/[a-z0-9A-Z_.\-]+)(?P/[a-z0-9A-Z_.\-/]+)*$`) + +// getLaunchpadDoc downloads tarball from launchpad.net. +func getLaunchpadDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, cmdFlags map[string]bool) ([]string, error) { + + if match["project"] != "" && match["series"] != "" { + rc, err := com.HttpGet(client, expand("https://code.launchpad.net/{project}{series}/.bzr/branch-format", match), nil) + _, isNotFound := err.(com.NotFoundError) + switch { + case err == nil: + rc.Close() + // The structure of the import path is launchpad.net/{root}/{dir}. + case isNotFound: + // The structure of the import path is is launchpad.net/{project}/{dir}. + match["repo"] = match["project"] + match["dir"] = expand("{series}{dir}", match) + default: + return nil, err + } + } + + var downloadPath string + // Check if download with specific revision. + if len(nod.Value) == 0 { + downloadPath = expand("https://bazaar.launchpad.net/+branch/{repo}/tarball", match) + } else { + downloadPath = expand("https://bazaar.launchpad.net/+branch/{repo}/tarball/"+nod.Value, match) + } + + // Scrape the repo browser to find the project revision and individual Go files. + p, err := com.HttpGetBytes(client, downloadPath, nil) + if err != nil { + return nil, err + } + + installPath := installRepoPath + "/" + nod.ImportPath + + // Remove old files. + os.RemoveAll(installPath + "/") + os.MkdirAll(installPath+"/", os.ModePerm) + + gzr, err := gzip.NewReader(bytes.NewReader(p)) + if err != nil { + return nil, err + } + defer gzr.Close() + + tr := tar.NewReader(gzr) + + var autoPath string // Auto path is the root path that generated by bitbucket.org. + // Get source file data. + dirs := make([]string, 0, 5) + for { + h, err := tr.Next() + if err == io.EOF { + break + } else if err != nil { + return nil, err + } + + fn := h.FileInfo().Name() + // Check root path. + if len(autoPath) == 0 { + autoPath = fn[:strings.Index(fn, match["repo"])+len(match["repo"])] + } + absPath := strings.Replace(fn, autoPath, installPath, 1) + + switch { + case h.FileInfo().IsDir(): // Directory. + // Create diretory before create file. + os.MkdirAll(absPath+"/", os.ModePerm) + // Check if current directory is example. + if !(!cmdFlags["-e"] && strings.Contains(absPath, "example")) { + dirs = append(dirs, absPath) + } + case !strings.HasPrefix(fn, "."): + // Get data from archive. + fbytes := make([]byte, h.Size) + if _, err := io.ReadFull(tr, fbytes); err != nil { + return nil, err + } + + // Write data to file + fw, err := os.Create(absPath) + if err != nil { + return nil, err + } + + _, err = fw.Write(fbytes) + fw.Close() + if err != nil { + return nil, err + } + } + } + + var imports []string + + // Check if need to check imports. + if nod.IsGetDeps { + for _, d := range dirs { + importPkgs, err := CheckImports(d+"/", match["importPath"]) + if err != nil { + return nil, err + } + imports = append(imports, importPkgs...) + } + } + + return imports, err +} diff --git a/doc/oschina.go b/doc/oschina.go new file mode 100644 index 000000000..d6ca10bfe --- /dev/null +++ b/doc/oschina.go @@ -0,0 +1,135 @@ +// Copyright 2013 gopm authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package doc + +import ( + "archive/zip" + "bytes" + "errors" + "io" + "net/http" + "os" + "regexp" + "strings" + + "github.com/Unknwon/com" +) + +var ( + oscTagRe = regexp.MustCompile(`/repository/archive\?ref=(.*)">`) + oscPattern = regexp.MustCompile(`^git\.oschina\.net/(?P[a-z0-9A-Z_.\-]+)/(?P[a-z0-9A-Z_.\-]+)(?P/[a-z0-9A-Z_.\-/]*)?$`) +) + +// getGithubDoc downloads tarball from git.oschina.com. +func getOSCDoc(client *http.Client, match map[string]string, installRepoPath string, nod *Node, cmdFlags map[string]bool) ([]string, error) { + // Check downlaod type. + switch nod.Type { + case BRANCH: + if len(nod.Value) == 0 { + match["sha"] = MASTER + } else { + match["sha"] = nod.Value + } + case TAG, COMMIT: + match["sha"] = nod.Value + default: + return nil, errors.New("Unknown node type: " + nod.Type) + } + + // zip: http://{projectRoot}/repository/archive?ref={sha} + + // Downlaod archive. + p, err := com.HttpGetBytes(client, expand("http://git.oschina.net/{owner}/{repo}/repository/archive?ref={sha}", match), nil) + if err != nil { + return nil, errors.New("Fail to donwload OSChina repo -> " + err.Error()) + } + + var installPath string + if nod.ImportPath == nod.DownloadURL { + suf := "." + nod.Value + if len(suf) == 1 { + suf = "" + } + projectPath := expand("git.oschina.net/{owner}/{repo}", match) + installPath = installRepoPath + "/" + projectPath + suf + nod.ImportPath = projectPath + } else { + installPath = installRepoPath + "/" + nod.ImportPath + } + + // Remove old files. + os.RemoveAll(installPath + "/") + os.MkdirAll(installPath+"/", os.ModePerm) + + r, err := zip.NewReader(bytes.NewReader(p), int64(len(p))) + if err != nil { + return nil, errors.New("Fail to unzip OSChina repo -> " + err.Error()) + } + + nameLen := len(match["repo"]) + dirs := make([]string, 0, 5) + // Need to add root path because we cannot get from tarball. + dirs = append(dirs, installPath+"/") + for _, f := range r.File { + fileName := f.FileInfo().Name()[nameLen+1:] + absPath := installPath + "/" + fileName + + if strings.HasSuffix(absPath, "/") { + dirs = append(dirs, absPath) + os.MkdirAll(absPath, os.ModePerm) + continue + } + // d, _ := path.Split(absPath) + // if !checkDir(d, dirs) { + // dirs = append(dirs, d) + // os.MkdirAll(d, os.ModePerm) + // } + + // Get file from archive. + rc, err := f.Open() + if err != nil { + return nil, errors.New("Fail to open OSChina repo -> " + err.Error()) + } + + // Write data to file + fw, _ := os.Create(absPath) + if err != nil { + return nil, err + } + + _, err = io.Copy(fw, rc) + // Close files. + rc.Close() + fw.Close() + if err != nil { + return nil, err + } + } + + var imports []string + + // Check if need to check imports. + if nod.IsGetDeps { + for _, d := range dirs { + importPkgs, err := CheckImports(d, match["importPath"]) + if err != nil { + return nil, err + } + imports = append(imports, importPkgs...) 
+ } + } + + return imports, err +} diff --git a/doc/struct.go b/doc/struct.go new file mode 100644 index 000000000..0993f6707 --- /dev/null +++ b/doc/struct.go @@ -0,0 +1,85 @@ +// Copyright 2013 gopm authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package doc + +import ( + "fmt" + "go/token" + "net/http" + "os" + "regexp" + "time" +) + +const ( + TRUNK = "trunk" + MASTER = "master" + DEFAULT = "default" + TAG = "tag" + BRANCH = "branch" + COMMIT = "commit" +) + +type Node struct { + ImportPath string + DownloadURL string + Type string + Value string // Branch, tag or commit. + IsGetDeps bool +} + +func (nod *Node) VerString() string { + if nod.Value == "" { + return nod.Type + } + return fmt.Sprintf("%v:%v", nod.Type, nod.Value) +} + +// source is source code file. +type source struct { + rawURL string + name string + data []byte +} + +func (s *source) Name() string { return s.name } +func (s *source) Size() int64 { return int64(len(s.data)) } +func (s *source) Mode() os.FileMode { return 0 } +func (s *source) ModTime() time.Time { return time.Time{} } +func (s *source) IsDir() bool { return false } +func (s *source) Sys() interface{} { return nil } + +// walker holds the state used when building the documentation. +type walker struct { + ImportPath string + srcs map[string]*source // Source files. + fset *token.FileSet +} + +// service represents a source code control service. +type service struct { + pattern *regexp.Regexp + prefix string + get func(*http.Client, map[string]string, string, *Node, map[string]bool) ([]string, error) +} + +// services is the list of source code control services handled by gopkgdoc. +var services = []*service{ + {githubPattern, "github.com/", getGithubDoc}, + {googlePattern, "code.google.com/", getGoogleDoc}, + {bitbucketPattern, "bitbucket.org/", getBitbucketDoc}, + {launchpadPattern, "launchpad.net/", getLaunchpadDoc}, + {oscPattern, "git.oschina.net/", getOSCDoc}, +} diff --git a/doc/utils.go b/doc/utils.go index bf2f74524..271625bba 100644 --- a/doc/utils.go +++ b/doc/utils.go @@ -16,10 +16,21 @@ package doc import ( "fmt" + "os" + "path" + "regexp" "runtime" "strings" + "syscall" ) +// IsExist returns if a file or directory exists +func IsExist(path string) bool { + _, err := os.Stat(path) + return err == nil || os.IsExist(err) +} + +// Non-Windows. const ( Gray = uint8(iota + 90) Red @@ -31,6 +42,26 @@ const ( EndColor = "\033[0m" ) +// Windows. +const ( + WDefault = uintptr(iota) + WBlue + WGreen + WCyan + WRed + WPurple + WYellow + WGray + WSilver + WLightBlue + WLime + WLightCyan + WLightRed + WLightPurple + WLightYellow + WWhite +) + // ColorLog colors log and print to stdout. // Log format: [ error ]. // Level: TRAC -> blue; ERRO -> red; WARN -> Magenta; SUCC -> green; others -> default. @@ -65,6 +96,16 @@ func ColorLog(format string, a ...interface{}) { log = strings.Replace(log, " #", EndColor, -1) log = clog + log + } else { + // Level. 
+ i := strings.Index(log, "]") + if log[0] == '[' && i > -1 { + fmt.Print("[") + printColorLevel(log[1:i]) + fmt.Print("]") + } + + log = log[i+1:] } fmt.Print(log) @@ -86,3 +127,637 @@ func getColorLevel(level string) string { return level } } + +// printColorLevel prints color level prompt, this is only for Windows. +func printColorLevel(level string) { + cc := WDefault + level = strings.ToUpper(level) + switch level { + case "TRAC": + cc = WBlue + case "ERRO": + cc = WRed + case "WARN": + cc = WPurple + case "SUCC": + cc = WGreen + default: + cc = WWhite + } + + kernel32 := syscall.NewLazyDLL("kernel32.dll") + proc := kernel32.NewProc("SetConsoleTextAttribute") + handle, _, _ := proc.Call(uintptr(syscall.Stdout), uintptr(cc)) + fmt.Print(level) + handle, _, _ = proc.Call(uintptr(syscall.Stdout), uintptr(WSilver)) + CloseHandle := kernel32.NewProc("CloseHandle") + CloseHandle.Call(handle) +} + +// GetGOPATH returns all paths in GOPATH variable. +func GetGOPATH() []string { + gopath := os.Getenv("GOPATH") + var paths []string + if runtime.GOOS == "windows" { + gopath = strings.Replace(gopath, "\\", "/", -1) + paths = strings.Split(gopath, ";") + } else { + paths = strings.Split(gopath, ":") + } + return paths +} + +// GetGOPATH returns best matched GOPATH. +func GetBestMatchGOPATH(appPath string) string { + paths := GetGOPATH() + for _, p := range paths { + if strings.HasPrefix(p, appPath) { + return strings.Replace(p, "\\", "/", -1) + } + } + return paths[0] +} + +// GetDirsInfo returns os.FileInfo of all sub-directories in root path. +func GetDirsInfo(rootPath string) ([]os.FileInfo, error) { + rootDir, err := os.Open(rootPath) + if err != nil { + return nil, err + } + defer rootDir.Close() + + dirs, err := rootDir.Readdir(0) + if err != nil { + return nil, err + } + + return dirs, err +} + +// CheckIsExistWithVCS returns false if directory only has VCS folder, +// or doesn't exist. +func CheckIsExistWithVCS(path string) bool { + // Check if directory exist. + if !IsExist(path) { + return false + } + + // Check if only has VCS folder. + dirs, err := GetDirsInfo(path) + if err != nil { + ColorLog("[ERRO] CheckIsExistWithVCS -> [ %s ]\n", err) + return false + } + + if len(dirs) > 1 { + return true + } else if len(dirs) == 0 { + return false + } + + switch dirs[0].Name() { + case ".git", ".hg", ".svn": + return false + } + + return true +} + +// CheckIsExistInGOPATH checks if given package import path exists in any path in GOPATH/src, +// and returns corresponding GOPATH. +func CheckIsExistInGOPATH(importPath string) (string, bool) { + paths := GetGOPATH() + for _, p := range paths { + if CheckIsExistWithVCS(p + "/src/" + importPath + "/") { + return p, true + } + } + return "", false +} + +// GetProjectPath returns project path of import path. +func GetProjectPath(importPath string) (projectPath string) { + projectPath = importPath + + // Check project hosting. 
+ switch { + case strings.HasPrefix(importPath, "github.com"): + projectPath = joinPath(importPath, 3) + case strings.HasPrefix(importPath, "code.google.com"): + projectPath = joinPath(importPath, 3) + case strings.HasPrefix(importPath, "bitbucket.org"): + projectPath = joinPath(importPath, 3) + case strings.HasPrefix(importPath, "launchpad.net"): + projectPath = joinPath(importPath, 2) + } + + return projectPath +} + +func joinPath(importPath string, num int) string { + subdirs := strings.Split(importPath, "/") + if len(subdirs) > num { + return strings.Join(subdirs[:num], "/") + } + return importPath +} + +var validTLD = map[string]bool{ + // curl http://data.iana.org/TLD/tlds-alpha-by-domain.txt | sed -e '/#/ d' -e 's/.*/"&": true,/' | tr [:upper:] [:lower:] + ".ac": true, + ".ad": true, + ".ae": true, + ".aero": true, + ".af": true, + ".ag": true, + ".ai": true, + ".al": true, + ".am": true, + ".an": true, + ".ao": true, + ".aq": true, + ".ar": true, + ".arpa": true, + ".as": true, + ".asia": true, + ".at": true, + ".au": true, + ".aw": true, + ".ax": true, + ".az": true, + ".ba": true, + ".bb": true, + ".bd": true, + ".be": true, + ".bf": true, + ".bg": true, + ".bh": true, + ".bi": true, + ".biz": true, + ".bj": true, + ".bm": true, + ".bn": true, + ".bo": true, + ".br": true, + ".bs": true, + ".bt": true, + ".bv": true, + ".bw": true, + ".by": true, + ".bz": true, + ".ca": true, + ".cat": true, + ".cc": true, + ".cd": true, + ".cf": true, + ".cg": true, + ".ch": true, + ".ci": true, + ".ck": true, + ".cl": true, + ".cm": true, + ".cn": true, + ".co": true, + ".com": true, + ".coop": true, + ".cr": true, + ".cu": true, + ".cv": true, + ".cw": true, + ".cx": true, + ".cy": true, + ".cz": true, + ".de": true, + ".dj": true, + ".dk": true, + ".dm": true, + ".do": true, + ".dz": true, + ".ec": true, + ".edu": true, + ".ee": true, + ".eg": true, + ".er": true, + ".es": true, + ".et": true, + ".eu": true, + ".fi": true, + ".fj": true, + ".fk": true, + ".fm": true, + ".fo": true, + ".fr": true, + ".ga": true, + ".gb": true, + ".gd": true, + ".ge": true, + ".gf": true, + ".gg": true, + ".gh": true, + ".gi": true, + ".gl": true, + ".gm": true, + ".gn": true, + ".gov": true, + ".gp": true, + ".gq": true, + ".gr": true, + ".gs": true, + ".gt": true, + ".gu": true, + ".gw": true, + ".gy": true, + ".hk": true, + ".hm": true, + ".hn": true, + ".hr": true, + ".ht": true, + ".hu": true, + ".id": true, + ".ie": true, + ".il": true, + ".im": true, + ".in": true, + ".info": true, + ".int": true, + ".io": true, + ".iq": true, + ".ir": true, + ".is": true, + ".it": true, + ".je": true, + ".jm": true, + ".jo": true, + ".jobs": true, + ".jp": true, + ".ke": true, + ".kg": true, + ".kh": true, + ".ki": true, + ".km": true, + ".kn": true, + ".kp": true, + ".kr": true, + ".kw": true, + ".ky": true, + ".kz": true, + ".la": true, + ".lb": true, + ".lc": true, + ".li": true, + ".lk": true, + ".lr": true, + ".ls": true, + ".lt": true, + ".lu": true, + ".lv": true, + ".ly": true, + ".ma": true, + ".mc": true, + ".md": true, + ".me": true, + ".mg": true, + ".mh": true, + ".mil": true, + ".mk": true, + ".ml": true, + ".mm": true, + ".mn": true, + ".mo": true, + ".mobi": true, + ".mp": true, + ".mq": true, + ".mr": true, + ".ms": true, + ".mt": true, + ".mu": true, + ".museum": true, + ".mv": true, + ".mw": true, + ".mx": true, + ".my": true, + ".mz": true, + ".na": true, + ".name": true, + ".nc": true, + ".ne": true, + ".net": true, + ".nf": true, + ".ng": true, + ".ni": true, + ".nl": true, + ".no": true, + ".np": 
true, + ".nr": true, + ".nu": true, + ".nz": true, + ".om": true, + ".org": true, + ".pa": true, + ".pe": true, + ".pf": true, + ".pg": true, + ".ph": true, + ".pk": true, + ".pl": true, + ".pm": true, + ".pn": true, + ".post": true, + ".pr": true, + ".pro": true, + ".ps": true, + ".pt": true, + ".pw": true, + ".py": true, + ".qa": true, + ".re": true, + ".ro": true, + ".rs": true, + ".ru": true, + ".rw": true, + ".sa": true, + ".sb": true, + ".sc": true, + ".sd": true, + ".se": true, + ".sg": true, + ".sh": true, + ".si": true, + ".sj": true, + ".sk": true, + ".sl": true, + ".sm": true, + ".sn": true, + ".so": true, + ".sr": true, + ".st": true, + ".su": true, + ".sv": true, + ".sx": true, + ".sy": true, + ".sz": true, + ".tc": true, + ".td": true, + ".tel": true, + ".tf": true, + ".tg": true, + ".th": true, + ".tj": true, + ".tk": true, + ".tl": true, + ".tm": true, + ".tn": true, + ".to": true, + ".tp": true, + ".tr": true, + ".travel": true, + ".tt": true, + ".tv": true, + ".tw": true, + ".tz": true, + ".ua": true, + ".ug": true, + ".uk": true, + ".us": true, + ".uy": true, + ".uz": true, + ".va": true, + ".vc": true, + ".ve": true, + ".vg": true, + ".vi": true, + ".vn": true, + ".vu": true, + ".wf": true, + ".ws": true, + ".xn--0zwm56d": true, + ".xn--11b5bs3a9aj6g": true, + ".xn--3e0b707e": true, + ".xn--45brj9c": true, + ".xn--80akhbyknj4f": true, + ".xn--80ao21a": true, + ".xn--90a3ac": true, + ".xn--9t4b11yi5a": true, + ".xn--clchc0ea0b2g2a9gcd": true, + ".xn--deba0ad": true, + ".xn--fiqs8s": true, + ".xn--fiqz9s": true, + ".xn--fpcrj9c3d": true, + ".xn--fzc2c9e2c": true, + ".xn--g6w251d": true, + ".xn--gecrj9c": true, + ".xn--h2brj9c": true, + ".xn--hgbk6aj7f53bba": true, + ".xn--hlcj6aya9esc7a": true, + ".xn--j6w193g": true, + ".xn--jxalpdlp": true, + ".xn--kgbechtv": true, + ".xn--kprw13d": true, + ".xn--kpry57d": true, + ".xn--lgbbat1ad8j": true, + ".xn--mgb9awbf": true, + ".xn--mgbaam7a8h": true, + ".xn--mgbayh7gpa": true, + ".xn--mgbbh1a71e": true, + ".xn--mgbc0a9azcg": true, + ".xn--mgberp4a5d4ar": true, + ".xn--mgbx4cd0ab": true, + ".xn--o3cw4h": true, + ".xn--ogbpf8fl": true, + ".xn--p1ai": true, + ".xn--pgbs0dh": true, + ".xn--s9brj9c": true, + ".xn--wgbh1c": true, + ".xn--wgbl6a": true, + ".xn--xkc2al3hye2a": true, + ".xn--xkc2dl3a5ee0h": true, + ".xn--yfro4i67o": true, + ".xn--ygbi2ammx": true, + ".xn--zckzah": true, + ".xxx": true, + ".ye": true, + ".yt": true, + ".za": true, + ".zm": true, + ".zw": true, +} + +var ( + validHost = regexp.MustCompile(`^[-a-z0-9]+(?:\.[-a-z0-9]+)+$`) + validPathElement = regexp.MustCompile(`^[-A-Za-z0-9~+][-A-Za-z0-9_.]*$`) +) + +// IsValidRemotePath returns true if importPath is structurally valid for "go get". +func IsValidRemotePath(importPath string) bool { + + parts := strings.Split(importPath, "/") + + if len(parts) <= 1 { + // Import path must contain at least one "/". 
+ return false + } + + if !validTLD[path.Ext(parts[0])] { + return false + } + + if !validHost.MatchString(parts[0]) { + return false + } + for _, part := range parts[1:] { + if !validPathElement.MatchString(part) || part == "testdata" { + return false + } + } + + return true +} + +var standardPath = map[string]bool{ + "builtin": true, + + // go list -f '"{{.ImportPath}}": true,' std | grep -v 'cmd/|exp/' + "cmd/api": true, + "cmd/cgo": true, + "cmd/fix": true, + "cmd/go": true, + "cmd/godoc": true, + "cmd/gofmt": true, + "cmd/vet": true, + "cmd/yacc": true, + "archive/tar": true, + "archive/zip": true, + "bufio": true, + "bytes": true, + "compress/bzip2": true, + "compress/flate": true, + "compress/gzip": true, + "compress/lzw": true, + "compress/zlib": true, + "container/heap": true, + "container/list": true, + "container/ring": true, + "crypto": true, + "crypto/aes": true, + "crypto/cipher": true, + "crypto/des": true, + "crypto/dsa": true, + "crypto/ecdsa": true, + "crypto/elliptic": true, + "crypto/hmac": true, + "crypto/md5": true, + "crypto/rand": true, + "crypto/rc4": true, + "crypto/rsa": true, + "crypto/sha1": true, + "crypto/sha256": true, + "crypto/sha512": true, + "crypto/subtle": true, + "crypto/tls": true, + "crypto/x509": true, + "crypto/x509/pkix": true, + "database/sql": true, + "database/sql/driver": true, + "debug/dwarf": true, + "debug/elf": true, + "debug/gosym": true, + "debug/macho": true, + "debug/pe": true, + "encoding/ascii85": true, + "encoding/asn1": true, + "encoding/base32": true, + "encoding/base64": true, + "encoding/binary": true, + "encoding/csv": true, + "encoding/gob": true, + "encoding/hex": true, + "encoding/json": true, + "encoding/pem": true, + "encoding/xml": true, + "errors": true, + "expvar": true, + "flag": true, + "fmt": true, + "go/ast": true, + "go/build": true, + "go/doc": true, + "go/format": true, + "go/parser": true, + "go/printer": true, + "go/scanner": true, + "go/token": true, + "hash": true, + "hash/adler32": true, + "hash/crc32": true, + "hash/crc64": true, + "hash/fnv": true, + "html": true, + "html/template": true, + "image": true, + "image/color": true, + "image/draw": true, + "image/gif": true, + "image/jpeg": true, + "image/png": true, + "index/suffixarray": true, + "io": true, + "io/ioutil": true, + "log": true, + "log/syslog": true, + "math": true, + "math/big": true, + "math/cmplx": true, + "math/rand": true, + "mime": true, + "mime/multipart": true, + "net": true, + "net/http": true, + "net/http/cgi": true, + "net/http/cookiejar": true, + "net/http/fcgi": true, + "net/http/httptest": true, + "net/http/httputil": true, + "net/http/pprof": true, + "net/mail": true, + "net/rpc": true, + "net/rpc/jsonrpc": true, + "net/smtp": true, + "net/textproto": true, + "net/url": true, + "os": true, + "os/exec": true, + "os/signal": true, + "os/user": true, + "path": true, + "path/filepath": true, + "reflect": true, + "regexp": true, + "regexp/syntax": true, + "runtime": true, + "runtime/cgo": true, + "runtime/debug": true, + "runtime/pprof": true, + "sort": true, + "strconv": true, + "strings": true, + "sync": true, + "sync/atomic": true, + "syscall": true, + "testing": true, + "testing/iotest": true, + "testing/quick": true, + "text/scanner": true, + "text/tabwriter": true, + "text/template": true, + "text/template/parse": true, + "time": true, + "unicode": true, + "unicode/utf16": true, + "unicode/utf8": true, + "unsafe": true, +} + +// IsGoRepoPath returns true if package is from standard library. 
+func IsGoRepoPath(importPath string) bool { + return standardPath[importPath] +} diff --git a/doc/vcs.go b/doc/vcs.go new file mode 100644 index 000000000..1a7f94926 --- /dev/null +++ b/doc/vcs.go @@ -0,0 +1,417 @@ +// Copyright 2013 gopm authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package doc + +import ( + "bytes" + "encoding/xml" + "errors" + "io" + "io/ioutil" + "log" + "net/http" + "os" + "os/exec" + "path" + "regexp" + "strconv" + "strings" + + "github.com/Unknwon/com" +) + +var ( + appPath string + autoBackup bool +) + +func SetAppConfig(path string, backup bool) { + appPath = path + autoBackup = backup +} + +// TODO: specify with command line flag +const repoRoot = "/tmp/gddo" + +var urlTemplates = []struct { + re *regexp.Regexp + template string + lineFmt string +}{ + { + regexp.MustCompile(`^git\.gitorious\.org/(?P<repo>[^/]+/[^/]+)$`), + "https://gitorious.org/{repo}/blobs/{tag}/{dir}{0}", + "#line%d", + }, + { + regexp.MustCompile(`^camlistore\.org/r/p/(?P<repo>[^/]+)$`), + "http://camlistore.org/code/?p={repo}.git;hb={tag};f={dir}{0}", + "#l%d", + }, +} + +// lookupURLTemplate finds an expand() template, match map and line number +// format for well-known repositories. +func lookupURLTemplate(repo, dir, tag string) (string, map[string]string, string) { + if strings.HasPrefix(dir, "/") { + dir = dir[1:] + "/" + } + for _, t := range urlTemplates { + if m := t.re.FindStringSubmatch(repo); m != nil { + match := map[string]string{ + "dir": dir, + "tag": tag, + } + for i, name := range t.re.SubexpNames() { + if name != "" { + match[name] = m[i] + } + } + return t.template, match, t.lineFmt + } + } + return "", nil, "" +} + +type vcsCmd struct { + schemes []string + download func([]string, string, string) (string, string, error) +} + +var vcsCmds = map[string]*vcsCmd{ + "git": &vcsCmd{ + schemes: []string{"http", "https", "git"}, + download: downloadGit, + }, +} + +var lsremoteRe = regexp.MustCompile(`(?m)^([0-9a-f]{40})\s+refs/(?:tags|heads)/(.+)$`) + +func downloadGit(schemes []string, repo, savedEtag string) (string, string, error) { + var p []byte + var scheme string + for i := range schemes { + cmd := exec.Command("git", "ls-remote", "--heads", "--tags", schemes[i]+"://"+repo+".git") + log.Println(strings.Join(cmd.Args, " ")) + var err error + p, err = cmd.Output() + if err == nil { + scheme = schemes[i] + break + } + } + + if scheme == "" { + return "", "", com.NotFoundError{"VCS not found"} + } + + tags := make(map[string]string) + for _, m := range lsremoteRe.FindAllSubmatch(p, -1) { + tags[string(m[2])] = string(m[1]) + } + + tag, commit, err := bestTag(tags, "master") + if err != nil { + return "", "", err + } + + etag := scheme + "-" + commit + + if etag == savedEtag { + return "", "", errNotModified + } + + dir := path.Join(repoRoot, repo+".git") + p, err = ioutil.ReadFile(path.Join(dir, ".git/HEAD")) + switch { + case err != nil: + if err := os.MkdirAll(dir, 0777); err != nil { + return "", "", err + } + cmd := exec.Command("git", "clone", scheme+"://"+repo, 
dir) + log.Println(strings.Join(cmd.Args, " ")) + if err := cmd.Run(); err != nil { + return "", "", err + } + case string(bytes.TrimRight(p, "\n")) == commit: + return tag, etag, nil + default: + cmd := exec.Command("git", "fetch") + log.Println(strings.Join(cmd.Args, " ")) + cmd.Dir = dir + if err := cmd.Run(); err != nil { + return "", "", err + } + } + + cmd := exec.Command("git", "checkout", "--detach", "--force", commit) + cmd.Dir = dir + if err := cmd.Run(); err != nil { + return "", "", err + } + + return tag, etag, nil +} + +var defaultTags = map[string]string{"git": "master", "hg": "default"} + +func bestTag(tags map[string]string, defaultTag string) (string, string, error) { + if commit, ok := tags["go1"]; ok { + return "go1", commit, nil + } + if commit, ok := tags[defaultTag]; ok { + return defaultTag, commit, nil + } + return "", "", com.NotFoundError{"Tag or branch not found."} +} + +// expand replaces {k} in template with match[k] or subs[atoi(k)] if k is not in match. +func expand(template string, match map[string]string, subs ...string) string { + var p []byte + var i int + for { + i = strings.Index(template, "{") + if i < 0 { + break + } + p = append(p, template[:i]...) + template = template[i+1:] + i = strings.Index(template, "}") + if s, ok := match[template[:i]]; ok { + p = append(p, s...) + } else { + j, _ := strconv.Atoi(template[:i]) + p = append(p, subs[j]...) + } + template = template[i+1:] + } + p = append(p, template...) + return string(p) +} + +// PureDownload downloads package without version control. +func PureDownload(nod *Node, installRepoPath string, flags map[string]bool) ([]string, error) { + for _, s := range services { + if s.get == nil || !strings.HasPrefix(nod.DownloadURL, s.prefix) { + continue + } + m := s.pattern.FindStringSubmatch(nod.DownloadURL) + if m == nil { + if s.prefix != "" { + return nil, errors.New("Cannot match package service prefix by given path") + } + continue + } + match := map[string]string{"importPath": nod.DownloadURL} + for i, n := range s.pattern.SubexpNames() { + if n != "" { + match[n] = m[i] + } + } + return s.get(HttpClient, match, installRepoPath, nod, flags) + } + + ColorLog("[TRAC] Cannot match any service, getting dynamic...\n") + return getDynamic(HttpClient, nod, installRepoPath, flags) +} + +func getDynamic(client *http.Client, nod *Node, installRepoPath string, flags map[string]bool) ([]string, error) { + match, err := fetchMeta(client, nod.ImportPath) + if err != nil { + return nil, err + } + + if match["projectRoot"] != nod.ImportPath { + rootMatch, err := fetchMeta(client, match["projectRoot"]) + if err != nil { + return nil, err + } + if rootMatch["projectRoot"] != match["projectRoot"] { + return nil, com.NotFoundError{"Project root mismatch."} + } + } + + nod.DownloadURL = expand("{repo}{dir}", match) + return PureDownload(nod, installRepoPath, flags) +} + +func fetchMeta(client *http.Client, importPath string) (map[string]string, error) { + uri := importPath + if !strings.Contains(uri, "/") { + // Add slash for root of domain. 
+ uri = uri + "/" + } + uri = uri + "?go-get=1" + + scheme := "https" + resp, err := client.Get(scheme + "://" + uri) + if err != nil || resp.StatusCode != 200 { + if err == nil { + resp.Body.Close() + } + scheme = "http" + resp, err = client.Get(scheme + "://" + uri) + if err != nil { + return nil, &com.RemoteError{strings.SplitN(importPath, "/", 2)[0], err} + } + } + defer resp.Body.Close() + return parseMeta(scheme, importPath, resp.Body) +} + +func attrValue(attrs []xml.Attr, name string) string { + for _, a := range attrs { + if strings.EqualFold(a.Name.Local, name) { + return a.Value + } + } + return "" +} + +func parseMeta(scheme, importPath string, r io.Reader) (map[string]string, error) { + var match map[string]string + + d := xml.NewDecoder(r) + d.Strict = false +metaScan: + for { + t, tokenErr := d.Token() + if tokenErr != nil { + break metaScan + } + switch t := t.(type) { + case xml.EndElement: + if strings.EqualFold(t.Name.Local, "head") { + break metaScan + } + case xml.StartElement: + if strings.EqualFold(t.Name.Local, "body") { + break metaScan + } + if !strings.EqualFold(t.Name.Local, "meta") || + attrValue(t.Attr, "name") != "go-import" { + continue metaScan + } + f := strings.Fields(attrValue(t.Attr, "content")) + if len(f) != 3 || + !strings.HasPrefix(importPath, f[0]) || + !(len(importPath) == len(f[0]) || importPath[len(f[0])] == '/') { + continue metaScan + } + if match != nil { + return nil, com.NotFoundError{"More than one <meta> found at " + scheme + "://" + importPath} + } + + projectRoot, vcs, repo := f[0], f[1], f[2] + + repo = strings.TrimSuffix(repo, "."+vcs) + i := strings.Index(repo, "://") + if i < 0 { + return nil, com.NotFoundError{"Bad repo URL in <meta>."} + } + proto := repo[:i] + repo = repo[i+len("://"):] + + match = map[string]string{ + // Used in getVCSDoc, same as vcsPattern matches. + "importPath": importPath, + "repo": repo, + "vcs": vcs, + "dir": importPath[len(projectRoot):], + + // Used in getVCSDoc + "scheme": proto, + + // Used in getDynamic. + "projectRoot": projectRoot, + "projectName": path.Base(projectRoot), + "projectURL": scheme + "://" + projectRoot, + } + } + } + if match == nil { + return nil, com.NotFoundError{"<meta> not found."} + } + return match, nil +} + +func getImports(rootPath string, match map[string]string, cmdFlags map[string]bool) (imports []string) { + dirs, err := GetDirsInfo(rootPath) + if err != nil { + return nil + } + + for _, d := range dirs { + if d.IsDir() && !(!cmdFlags["-e"] && strings.Contains(d.Name(), "example")) { + absPath := rootPath + d.Name() + "/" + importPkgs, err := CheckImports(absPath, match["importPath"]) + if err != nil { + return nil + } + imports = append(imports, importPkgs...) + } + } + return imports +} + +// CheckImports checks package dependencies. +func CheckImports(absPath, importPath string) (importPkgs []string, err error) { + dir, err := os.Open(absPath) + if err != nil { + return nil, err + } + defer dir.Close() + + // Get file info slice. + fis, err := dir.Readdir(0) + if err != nil { + return nil, err + } + + files := make([]*source, 0, 10) + for _, fi := range fis { + // Only handle Go source files. + if strings.HasSuffix(fi.Name(), ".go") { + f, err := os.Open(absPath + fi.Name()) + if err != nil { + return nil, err + } + + fbytes := make([]byte, fi.Size()) + _, err = f.Read(fbytes) + f.Close() + + if err != nil { + return nil, err + } + + files = append(files, &source{ + name: fi.Name(), + data: fbytes, + }) + } + } + + // Check if there are Go source files. 
+ if len(files) > 0 { + w := &walker{ImportPath: importPath} + importPkgs, err = w.build(files) + if err != nil { + return nil, err + } + } + + return importPkgs, err +} diff --git a/doc/walker.go b/doc/walker.go new file mode 100644 index 000000000..797c301ae --- /dev/null +++ b/doc/walker.go @@ -0,0 +1,144 @@ +// Copyright 2013 gopm authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package doc + +import ( + "bytes" + "go/ast" + "go/build" + "go/parser" + "go/token" + "io" + "io/ioutil" + "os" + "path" + "runtime" + "strings" +) + +type sliceWriter struct{ p *[]byte } + +func (w sliceWriter) Write(p []byte) (int, error) { + *w.p = append(*w.p, p...) + return len(p), nil +} + +func (w *walker) readDir(dir string) ([]os.FileInfo, error) { + if dir != w.ImportPath { + panic("unexpected") + } + fis := make([]os.FileInfo, 0, len(w.srcs)) + for _, src := range w.srcs { + fis = append(fis, src) + } + return fis, nil +} + +func (w *walker) openFile(path string) (io.ReadCloser, error) { + if strings.HasPrefix(path, w.ImportPath+"/") { + if src, ok := w.srcs[path[len(w.ImportPath)+1:]]; ok { + return ioutil.NopCloser(bytes.NewReader(src.data)), nil + } + } + panic("unexpected") +} + +func simpleImporter(imports map[string]*ast.Object, path string) (*ast.Object, error) { + pkg := imports[path] + if pkg == nil { + // Guess the package name without importing it. Start with the last + // element of the path. + name := path[strings.LastIndex(path, "/")+1:] + + // Trim commonly used prefixes and suffixes containing illegal name + // runes. + name = strings.TrimSuffix(name, ".go") + name = strings.TrimSuffix(name, "-go") + name = strings.TrimPrefix(name, "go.") + name = strings.TrimPrefix(name, "go-") + name = strings.TrimPrefix(name, "biogo.") + + // It's also common for the last element of the path to contain an + // extra "go" prefix, but not always. TODO: examine unresolved ids to + // detect when trimming the "go" prefix is appropriate. + + pkg = ast.NewObj(ast.Pkg, name) + pkg.Data = ast.NewScope(nil) + imports[path] = pkg + } + return pkg, nil +} + +// build gets imports from source files. +func (w *walker) build(srcs []*source) ([]string, error) { + // Add source files to walker, I skipped references here. + w.srcs = make(map[string]*source) + for _, src := range srcs { + w.srcs[src.name] = src + } + + w.fset = token.NewFileSet() + + // Find the package and associated files. 
+ ctxt := build.Context{ + GOOS: runtime.GOOS, + GOARCH: runtime.GOARCH, + CgoEnabled: true, + JoinPath: path.Join, + IsAbsPath: path.IsAbs, + SplitPathList: func(list string) []string { return strings.Split(list, ":") }, + IsDir: func(path string) bool { panic("unexpected") }, + HasSubdir: func(root, dir string) (rel string, ok bool) { panic("unexpected") }, + ReadDir: func(dir string) (fi []os.FileInfo, err error) { return w.readDir(dir) }, + OpenFile: func(path string) (r io.ReadCloser, err error) { return w.openFile(path) }, + Compiler: "gc", + } + + bpkg, err := ctxt.ImportDir(w.ImportPath, 0) + // Continue if there are no Go source files; we still want the directory info. + _, nogo := err.(*build.NoGoError) + if err != nil { + if nogo { + err = nil + } else { + ColorLog("[WARN] Error occurred when checking imports[ %s ]\n", err) + return nil, nil + } + } + + // Parse the Go files. + + files := make(map[string]*ast.File) + for _, name := range append(bpkg.GoFiles, bpkg.CgoFiles...) { + file, err := parser.ParseFile(w.fset, name, w.srcs[name].data, parser.ParseComments) + if err != nil { + //beego.Error("doc.walker.build():", err) + continue + } + files[name] = file + } + + w.ImportPath = strings.Replace(w.ImportPath, "\\", "/", -1) + var imports []string + for _, v := range bpkg.Imports { + // Skip standard library. + if !IsGoRepoPath(v) && + (GetProjectPath(v) != GetProjectPath(w.ImportPath)) { + imports = append(imports, v) + } + } + + return imports, err +} diff --git a/gopm.go b/gopm.go index 2f774535a..b45cae609 100644 --- a/gopm.go +++ b/gopm.go @@ -26,14 +26,15 @@ import ( "unicode" "unicode/utf8" - "./cmd" + "github.com/Unknwon/com" + "github.com/gpmgo/gopm/cmd" ) // +build go1.1 // Test that go1.1 tag above is included in builds. main.go refers to this definition. const go11tag = true -const APP_VER = "0.1.0.0813" +const APP_VER = "0.2.5.0827" var ( config map[string]interface{} @@ -71,6 +72,14 @@ var commands = []*cmd.Command{ // bacause we need to get execute path in runtime. func initialize() bool { runtime.GOMAXPROCS(runtime.NumCPU()) + + // Get application execute path. + var err error + cmd.AppPath, err = com.GetSrcPath("github.com/gpmgo/gopm") + if err != nil { + return false + } + return true }
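
The new path checks (IsValidRemotePath, IsGoRepoPath) are easy to exercise on their own. A minimal usage sketch, assuming the github.com/gpmgo/gopm/doc package builds as in this patch:

package main

import (
	"fmt"

	"github.com/gpmgo/gopm/doc"
)

func main() {
	// A remote path needs a host with a known TLD plus at least one path element.
	fmt.Println(doc.IsValidRemotePath("github.com/gpmgo/gopm")) // true
	fmt.Println(doc.IsValidRemotePath("fmt"))                   // false: no "/"

	// Standard-library packages are recognized so dependency resolution can skip them.
	fmt.Println(doc.IsGoRepoPath("net/http")) // true
}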
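
downloadGit's tag selection can also be tried in isolation. The sketch below reuses the same ls-remote pattern and the same preference order as bestTag (the go1 tag first, then the default branch); it is only a sketch: the object names are fabricated and it returns a bool instead of the com.NotFoundError used in vcs.go.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Same pattern downloadGit applies to `git ls-remote --heads --tags` output.
var lsremoteRe = regexp.MustCompile(`(?m)^([0-9a-f]{40})\s+refs/(?:tags|heads)/(.+)$`)

// bestTag mirrors the selection rule in vcs.go: prefer "go1", then the default branch.
func bestTag(tags map[string]string, defaultTag string) (string, string, bool) {
	if commit, ok := tags["go1"]; ok {
		return "go1", commit, true
	}
	if commit, ok := tags[defaultTag]; ok {
		return defaultTag, commit, true
	}
	return "", "", false
}

func main() {
	// Fabricated ls-remote output; real object names are 40 hex characters.
	out := strings.Repeat("a", 40) + "\trefs/heads/master\n" +
		strings.Repeat("b", 40) + "\trefs/tags/go1\n"

	tags := make(map[string]string)
	for _, m := range lsremoteRe.FindAllStringSubmatch(out, -1) {
		tags[m[2]] = m[1]
	}

	tag, commit, ok := bestTag(tags, "master")
	fmt.Println(tag, commit[:7], ok) // go1 bbbbbbb true
}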
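
For packages hosted on custom domains, fetchMeta/parseMeta implement the standard go-get discovery: fetch the page with ?go-get=1 appended and read the <meta name="go-import" content="prefix vcs repo"> tag. The prefix check reduces to the following; parseGoImport is a hypothetical helper written for illustration, not part of gopm, and the repository URL is only an example.

package main

import (
	"fmt"
	"strings"
)

// parseGoImport splits a go-import content attribute into its three fields and
// verifies the import path lies under the announced prefix, as parseMeta does.
func parseGoImport(importPath, content string) (projectRoot, vcs, repo string, ok bool) {
	f := strings.Fields(content)
	if len(f) != 3 {
		return "", "", "", false
	}
	if !strings.HasPrefix(importPath, f[0]) ||
		!(len(importPath) == len(f[0]) || importPath[len(f[0])] == '/') {
		return "", "", "", false
	}
	return f[0], f[1], f[2], true
}

func main() {
	root, vcs, repo, ok := parseGoImport(
		"camlistore.org/pkg/misc",
		"camlistore.org git https://camlistore.googlesource.com/camlistore")
	fmt.Println(root, vcs, repo, ok)
	// Output: camlistore.org git https://camlistore.googlesource.com/camlistore true
}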