OLD | NEW |
1 #!/usr/bin/python | 1 #!/usr/bin/python |
2 # Copyright (c) 2008-2010 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2008-2010 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Generate index.html files for a Google Storage for Developers directory. | 6 """Generate index.html files for a Google Storage for Developers directory. |
7 | 7 |
8 Google Storage for Developers provides only a raw set of objects. | 8 Google Storage for Developers provides only a raw set of objects. |
9 For some buckets we would like to be able to support browsing of the directory | 9 For some buckets we would like to be able to support browsing of the directory |
10 tree. This utility will generate the needed index and upload/update it. | 10 tree. This utility will generate the needed index and upload/update it. |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
49 | 49 |
def GetPathInfo(path, options):
  """Collect size, date, and md5 metadata for a given gsd path.

  Runs `gsutil ls -L <path>` and scrapes the interesting fields out of its
  stdout. Any field absent from the output maps to None rather than raising.

  Args:
    path: gs:// URL of the object to inspect.
    options: parsed command-line options; options.gsutil is the path to the
        gsutil binary.

  Returns:
    A dict with keys 'size' (human-fixed-up via FixupSize, or None),
    'md5' (MD5 or Etag value, or None), and 'date' (creation time, or None).

  Raises:
    RuntimeError: if gsutil exits with a non-zero return code.
  """
  # Query current object state.
  cmd = [options.gsutil, 'ls', '-L', path]
  p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
  p_stdout, _ = p.communicate()
  # An assert would be stripped under -O; fail loudly instead.
  if p.returncode != 0:
    raise RuntimeError('%s failed with return code %d' % (cmd, p.returncode))

  def _Extract(pattern, group):
    # Return the requested group of the first match in stdout, else None.
    m = re.search(pattern, p_stdout)
    return m.group(group) if m else None

  # Extract interesting fields.
  fields = {}
  size = _Extract(r'\tContent-Length:\t([0-9]+)\n', 1)
  fields['size'] = FixupSize(size) if size is not None else None
  fields['md5'] = _Extract(r'\t(MD5|Etag):\t([^\n]+)\n', 2)
  fields['date'] = _Extract(r'\tCreation time:\t([^\n]+)\n', 1)
  return fields
64 | 78 |
65 | 79 |
66 def GenerateIndex(path, children, directories, options): | 80 def GenerateIndex(path, children, directories, options): |
67 """Generate index for a given path as needed.""" | 81 """Generate index for a given path as needed.""" |
68 # Generate index content. | 82 # Generate index content. |
69 index = '' | 83 index = '' |
70 index += '<html>' | 84 index += '<html>' |
71 index += '<head>' | 85 index += '<head>' |
72 index += '<title>Index of %s</title>' % path | 86 index += '<title>Index of %s</title>' % path |
(...skipping 137 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
210 dest='gsutil', help='path to gsutil') | 224 dest='gsutil', help='path to gsutil') |
211 options, args = parser.parse_args(argv) | 225 options, args = parser.parse_args(argv) |
212 if len(args) != 2 or not args[1].startswith('gs://'): | 226 if len(args) != 2 or not args[1].startswith('gs://'): |
213 parser.print_help() | 227 parser.print_help() |
214 return 1 | 228 return 1 |
215 return GenerateIndexes(args[1], options) | 229 return GenerateIndexes(args[1], options) |
216 | 230 |
217 | 231 |
218 if __name__ == '__main__': | 232 if __name__ == '__main__': |
219 sys.exit(main(sys.argv)) | 233 sys.exit(main(sys.argv)) |
OLD | NEW |