Merge pull request #664 from jamieoliver/feature/gollum-lib
Move gollum back end to gollum-lib #647
@@ -153,7 +153,7 @@ if options['irb']
|
||||
exit 0
|
||||
end
|
||||
else
|
||||
require 'gollum/frontend/app'
|
||||
require 'gollum/app'
|
||||
Precious::App.set(:gollum_path, gollum_path)
|
||||
Precious::App.set(:wiki_options, wiki_options)
|
||||
|
||||
|
||||
@@ -23,14 +23,9 @@ Gem::Specification.new do |s|
|
||||
s.rdoc_options = ["--charset=UTF-8"]
|
||||
s.extra_rdoc_files = %w[README.md LICENSE]
|
||||
|
||||
s.add_dependency('grit', '~> 2.5.0')
|
||||
s.add_dependency('github-markup', ['>= 0.7.5', '< 1.0.0'])
|
||||
s.add_dependency('github-markdown', '~> 0.5.3')
|
||||
s.add_dependency('pygments.rb', '~> 0.4.2')
|
||||
s.add_dependency('gollum-lib', '~> 0.0.1')
|
||||
s.add_dependency('sinatra', '~> 1.3.5')
|
||||
s.add_dependency('mustache', ['>= 0.99.4', '< 1.0.0'])
|
||||
s.add_dependency('sanitize', '~> 2.0.3')
|
||||
s.add_dependency('nokogiri', '~> 1.5.6')
|
||||
s.add_dependency('useragent', '~> 0.4.16')
|
||||
s.add_dependency('stringex', '~> 1.5.1')
|
||||
|
||||
@@ -44,6 +39,7 @@ Gem::Specification.new do |s|
|
||||
s.add_development_dependency('pry', '~> 0.9.12')
|
||||
# required by pry
|
||||
s.add_development_dependency('rb-readline', '~> 0.4.2')
|
||||
s.add_development_dependency('minitest-reporters', '>= 0.5.0')
|
||||
|
||||
# = MANIFEST =
|
||||
s.files = %w[
|
||||
|
||||
@@ -6,24 +6,11 @@ require 'ostruct'
|
||||
|
||||
# external
|
||||
require 'grit'
|
||||
require File.expand_path('../gollum/grit_ext', __FILE__)
|
||||
require 'github/markup'
|
||||
require 'sanitize'
|
||||
|
||||
# internal
|
||||
require File.expand_path('../gollum/git_access', __FILE__)
|
||||
require File.expand_path('../gollum/committer', __FILE__)
|
||||
require File.expand_path('../gollum/pagination', __FILE__)
|
||||
require File.expand_path('../gollum/blob_entry', __FILE__)
|
||||
require File.expand_path('../gollum/wiki', __FILE__)
|
||||
require File.expand_path('../gollum/page', __FILE__)
|
||||
require File.expand_path('../gollum/file', __FILE__)
|
||||
require File.expand_path('../gollum/file_view', __FILE__)
|
||||
require File.expand_path('../gollum/markup', __FILE__)
|
||||
require File.expand_path('../gollum/markups', __FILE__)
|
||||
require File.expand_path('../gollum/sanitization', __FILE__)
|
||||
require File.expand_path('../gollum/web_sequence_diagram', __FILE__)
|
||||
require File.expand_path('../gollum/frontend/uri_encode_component', __FILE__)
|
||||
require File.expand_path('../gollum/uri_encode_component', __FILE__)
|
||||
|
||||
# Set ruby to UTF-8 mode
|
||||
# This is required for Ruby 1.8.7 which gollum still supports.
|
||||
@@ -33,7 +20,7 @@ module Gollum
|
||||
VERSION = '2.4.11'
|
||||
|
||||
def self.assets_path
|
||||
::File.expand_path('gollum/frontend/public', ::File.dirname(__FILE__))
|
||||
::File.expand_path('gollum/public', ::File.dirname(__FILE__))
|
||||
end
|
||||
|
||||
class Error < StandardError; end
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
# ~*~ encoding: utf-8 ~*~
|
||||
require 'cgi'
|
||||
require 'sinatra'
|
||||
require 'gollum'
|
||||
require 'gollum-lib'
|
||||
require 'mustache/sinatra'
|
||||
require 'useragent'
|
||||
require 'stringex'
|
||||
|
||||
require 'gollum/frontend/views/layout'
|
||||
require 'gollum/frontend/views/editable'
|
||||
require 'gollum/frontend/views/has_page'
|
||||
require 'gollum/views/layout'
|
||||
require 'gollum/views/editable'
|
||||
require 'gollum/views/has_page'
|
||||
|
||||
require File.expand_path '../helpers', __FILE__
|
||||
|
||||
@@ -27,7 +27,7 @@ end
|
||||
# There are a number of wiki options that can be set for the frontend
|
||||
#
|
||||
# Example
|
||||
# require 'gollum/frontend/app'
|
||||
# require 'gollum/app'
|
||||
# Precious::App.set(:wiki_options, {
|
||||
# :universal_toc => false,
|
||||
# }
|
||||
@@ -1,95 +0,0 @@
|
||||
# ~*~ encoding: utf-8 ~*~
|
||||
module Gollum
|
||||
class BlobEntry
|
||||
# Gets the String SHA for this blob.
|
||||
attr_reader :sha
|
||||
|
||||
# Gets the full path String for this blob.
|
||||
attr_reader :path
|
||||
|
||||
# Gets the Fixnum size of this blob.
|
||||
attr_reader :size
|
||||
|
||||
# Gets the Fixnum mode of this blob.
|
||||
attr_reader :mode
|
||||
|
||||
def initialize(sha, path, size = nil, mode = nil)
|
||||
@sha = sha
|
||||
@path = path
|
||||
@size = size
|
||||
@mode = mode
|
||||
@dir = @name = @blob = nil
|
||||
end
|
||||
|
||||
# Gets the normalized directory path String for this blob.
|
||||
def dir
|
||||
@dir ||= self.class.normalize_dir(::File.dirname(@path))
|
||||
end
|
||||
|
||||
# Gets the file base name String for this blob.
|
||||
def name
|
||||
@name ||= ::File.basename(@path)
|
||||
end
|
||||
|
||||
# Gets a Grit::Blob instance for this blob.
|
||||
#
|
||||
# repo - Grit::Repo instance for the Grit::Blob.
|
||||
#
|
||||
# Returns an unbaked Grit::Blob instance.
|
||||
def blob(repo)
|
||||
@blob ||= Grit::Blob.create(repo,
|
||||
:id => @sha, :name => name, :size => @size, :mode => @mode)
|
||||
end
|
||||
|
||||
# Gets a Page instance for this blob.
|
||||
#
|
||||
# wiki - Gollum::Wiki instance for the Gollum::Page
|
||||
#
|
||||
# Returns a Gollum::Page instance.
|
||||
def page(wiki, commit)
|
||||
blob = self.blob(wiki.repo)
|
||||
page = wiki.page_class.new(wiki).populate(blob, self.dir)
|
||||
page.version = commit
|
||||
page
|
||||
end
|
||||
|
||||
# Gets a File instance for this blob.
|
||||
#
|
||||
# wiki - Gollum::Wiki instance for the Gollum::File
|
||||
#
|
||||
# Returns a Gollum::File instance.
|
||||
def file(wiki, commit)
|
||||
blob = self.blob(wiki.repo)
|
||||
file = wiki.file_class.new(wiki).populate(blob, self.dir)
|
||||
file.version = commit
|
||||
file
|
||||
end
|
||||
|
||||
def inspect
|
||||
%(#<Gollum::BlobEntry #{@sha} #{@path}>)
|
||||
end
|
||||
|
||||
# Normalizes a given directory name for searching through tree paths.
|
||||
# Ensures that a directory begins with a slash, or
|
||||
#
|
||||
# normalize_dir("") # => ""
|
||||
# normalize_dir(".") # => ""
|
||||
# normalize_dir("foo") # => "/foo"
|
||||
# normalize_dir("/foo/") # => "/foo"
|
||||
# normalize_dir("/") # => ""
|
||||
# normalize_dir("c:/") # => ""
|
||||
#
|
||||
# dir - String directory name.
|
||||
#
|
||||
# Returns a normalized String directory name, or nil if no directory
|
||||
# is given.
|
||||
def self.normalize_dir(dir)
|
||||
return '' if dir =~ /^.:\/$/
|
||||
if dir
|
||||
dir = ::File.expand_path(dir, '/')
|
||||
dir = '' if dir == '/'
|
||||
end
|
||||
dir
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,236 +0,0 @@
|
||||
# ~*~ encoding: utf-8 ~*~
|
||||
module Gollum
|
||||
# Responsible for handling the commit process for a Wiki. It sets up the
|
||||
# Git index, provides methods for modifying the tree, and stores callbacks
|
||||
# to be fired after the commit has been made. This is specifically
|
||||
# designed to handle multiple updated pages in a single commit.
|
||||
class Committer
|
||||
# Gets the instance of the Gollum::Wiki that is being updated.
|
||||
attr_reader :wiki
|
||||
|
||||
# Gets a Hash of commit options.
|
||||
attr_reader :options
|
||||
|
||||
# Initializes the Committer.
|
||||
#
|
||||
# wiki - The Gollum::Wiki instance that is being updated.
|
||||
# options - The commit Hash details:
|
||||
# :message - The String commit message.
|
||||
# :name - The String author full name.
|
||||
# :email - The String email address.
|
||||
# :parent - Optional Grit::Commit parent to this update.
|
||||
# :tree - Optional String SHA of the tree to create the
|
||||
# index from.
|
||||
# :committer - Optional Gollum::Committer instance. If provided,
|
||||
# assume that this operation is part of batch of
|
||||
# updates and the commit happens later.
|
||||
#
|
||||
# Returns the Committer instance.
|
||||
def initialize(wiki, options = {})
|
||||
@wiki = wiki
|
||||
@options = options
|
||||
@callbacks = []
|
||||
end
|
||||
|
||||
# Public: References the Git index for this commit.
|
||||
#
|
||||
# Returns a Grit::Index.
|
||||
def index
|
||||
@index ||= begin
|
||||
idx = @wiki.repo.index
|
||||
if tree = options[:tree]
|
||||
idx.read_tree(tree)
|
||||
elsif parent = parents.first
|
||||
idx.read_tree(parent.tree.id)
|
||||
end
|
||||
idx
|
||||
end
|
||||
end
|
||||
|
||||
# Public: The committer for this commit.
|
||||
#
|
||||
# Returns a Grit::Actor.
|
||||
def actor
|
||||
@actor ||= begin
|
||||
@options[:name] = @wiki.default_committer_name if @options[:name].to_s.empty?
|
||||
@options[:email] = @wiki.default_committer_email if @options[:email].to_s.empty?
|
||||
Grit::Actor.new(@options[:name], @options[:email])
|
||||
end
|
||||
end
|
||||
|
||||
# Public: The parent commits to this pending commit.
|
||||
#
|
||||
# Returns an array of Grit::Commit instances.
|
||||
def parents
|
||||
@parents ||= begin
|
||||
arr = [@options[:parent] || @wiki.repo.commit(@wiki.ref)]
|
||||
arr.flatten!
|
||||
arr.compact!
|
||||
arr
|
||||
end
|
||||
end
|
||||
|
||||
# Adds a page to the given Index.
|
||||
#
|
||||
# dir - The String subdirectory of the Gollum::Page without any
|
||||
# prefix or suffix slashes (e.g. "foo/bar").
|
||||
# name - The String Gollum::Page filename_stripped.
|
||||
# format - The Symbol Gollum::Page format.
|
||||
# data - The String wiki data to store in the tree map.
|
||||
# allow_same_ext - A Boolean determining if the tree map allows the same
|
||||
# filename with the same extension.
|
||||
#
|
||||
# Raises Gollum::DuplicatePageError if a matching filename already exists.
|
||||
# This way, pages are not inadvertently overwritten.
|
||||
#
|
||||
# Returns nothing (modifies the Index in place).
|
||||
def add_to_index(dir, name, format, data, allow_same_ext = false)
|
||||
# spaces must be dashes
|
||||
dir.gsub!(' ', '-')
|
||||
name.gsub!(' ', '-')
|
||||
|
||||
path = @wiki.page_file_name(name, format)
|
||||
|
||||
dir = '/' if dir.strip.empty?
|
||||
|
||||
fullpath = ::File.join(*[@wiki.page_file_dir, dir, path].compact)
|
||||
fullpath = fullpath[1..-1] if fullpath =~ /^\//
|
||||
|
||||
if index.current_tree && tree = index.current_tree / (@wiki.page_file_dir || '/')
|
||||
tree = tree / dir unless tree.nil?
|
||||
end
|
||||
|
||||
if tree
|
||||
downpath = path.downcase.sub(/\.\w+$/, '')
|
||||
|
||||
tree.blobs.each do |blob|
|
||||
next if page_path_scheduled_for_deletion?(index.tree, fullpath)
|
||||
|
||||
existing_file = blob.name.downcase.sub(/\.\w+$/, '')
|
||||
existing_file_ext = ::File.extname(blob.name).sub(/^\./, '')
|
||||
|
||||
new_file_ext = ::File.extname(path).sub(/^\./, '')
|
||||
|
||||
if downpath == existing_file && !(allow_same_ext && new_file_ext == existing_file_ext)
|
||||
raise DuplicatePageError.new(dir, blob.name, path)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
fullpath = fullpath.force_encoding('ascii-8bit') if fullpath.respond_to?(:force_encoding)
|
||||
|
||||
index.add(fullpath, @wiki.normalize(data))
|
||||
end
|
||||
|
||||
# Update the given file in the repository's working directory if there
|
||||
# is a working directory present.
|
||||
#
|
||||
# dir - The String directory in which the file lives.
|
||||
# name - The String name of the page or the stripped filename
|
||||
# (should be pre-canonicalized if required).
|
||||
# format - The Symbol format of the page.
|
||||
#
|
||||
# Returns nothing.
|
||||
def update_working_dir(dir, name, format)
|
||||
unless @wiki.repo.bare
|
||||
if @wiki.page_file_dir
|
||||
dir = dir.size.zero? ? @wiki.page_file_dir : ::File.join(dir, @wiki.page_file_dir)
|
||||
end
|
||||
|
||||
path =
|
||||
if dir == ''
|
||||
@wiki.page_file_name(name, format)
|
||||
else
|
||||
::File.join(dir, @wiki.page_file_name(name, format))
|
||||
end
|
||||
|
||||
path = path.force_encoding('ascii-8bit') if path.respond_to?(:force_encoding)
|
||||
|
||||
Dir.chdir(::File.join(@wiki.repo.path, '..')) do
|
||||
if file_path_scheduled_for_deletion?(index.tree, path)
|
||||
@wiki.repo.git.rm({'f' => true}, '--', path)
|
||||
else
|
||||
@wiki.repo.git.checkout({}, 'HEAD', '--', path)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Writes the commit to Git and runs the after_commit callbacks.
|
||||
#
|
||||
# Returns the String SHA1 of the new commit.
|
||||
def commit
|
||||
sha1 = index.commit(@options[:message], parents, actor, nil, @wiki.ref)
|
||||
@callbacks.each do |cb|
|
||||
cb.call(self, sha1)
|
||||
end
|
||||
sha1
|
||||
end
|
||||
|
||||
# Adds a callback to be fired after a commit.
|
||||
#
|
||||
# block - A block that expects this Committer instance and the created
|
||||
# commit's SHA1 as the arguments.
|
||||
#
|
||||
# Returns nothing.
|
||||
def after_commit(&block)
|
||||
@callbacks << block
|
||||
end
|
||||
|
||||
# Determine if a given page (regardless of format) is scheduled to be
|
||||
# deleted in the next commit for the given Index.
|
||||
#
|
||||
# map - The Hash map:
|
||||
# key - The String directory or filename.
|
||||
# val - The Hash submap or the String contents of the file.
|
||||
# path - The String path of the page file. This may include the format
|
||||
# extension in which case it will be ignored.
|
||||
#
|
||||
# Returns the Boolean response.
|
||||
def page_path_scheduled_for_deletion?(map, path)
|
||||
parts = path.split('/')
|
||||
if parts.size == 1
|
||||
deletions = map.keys.select { |k| !map[k] }
|
||||
downfile = parts.first.downcase.sub(/\.\w+$/, '')
|
||||
deletions.any? { |d| d.downcase.sub(/\.\w+$/, '') == downfile }
|
||||
else
|
||||
part = parts.shift
|
||||
if rest = map[part]
|
||||
page_path_scheduled_for_deletion?(rest, parts.join('/'))
|
||||
else
|
||||
false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Determine if a given file is scheduled to be deleted in the next commit
|
||||
# for the given Index.
|
||||
#
|
||||
# map - The Hash map:
|
||||
# key - The String directory or filename.
|
||||
# val - The Hash submap or the String contents of the file.
|
||||
# path - The String path of the file including extension.
|
||||
#
|
||||
# Returns the Boolean response.
|
||||
def file_path_scheduled_for_deletion?(map, path)
|
||||
parts = path.split('/')
|
||||
if parts.size == 1
|
||||
deletions = map.keys.select { |k| !map[k] }
|
||||
deletions.any? { |d| d == parts.first }
|
||||
else
|
||||
part = parts.shift
|
||||
if rest = map[part]
|
||||
file_path_scheduled_for_deletion?(rest, parts.join('/'))
|
||||
else
|
||||
false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Proxies methods t
|
||||
def method_missing(name, *args)
|
||||
args.map! { |item| item.respond_to?(:force_encoding) ? item.force_encoding('ascii-8bit') : item }
|
||||
index.send(name, *args)
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,101 +0,0 @@
|
||||
# ~*~ encoding: utf-8 ~*~
|
||||
module Gollum
|
||||
class File
|
||||
Wiki.file_class = self
|
||||
|
||||
# Public: Initialize a file.
|
||||
#
|
||||
# wiki - The Gollum::Wiki in question.
|
||||
#
|
||||
# Returns a newly initialized Gollum::File.
|
||||
def initialize(wiki)
|
||||
@wiki = wiki
|
||||
@blob = nil
|
||||
@path = nil
|
||||
end
|
||||
|
||||
# Public: The url path required to reach this page within the repo.
|
||||
#
|
||||
# Returns the String url_path
|
||||
def url_path
|
||||
path = self.path
|
||||
path = path.sub(/\/[^\/]+$/, '/') if path.include?('/')
|
||||
path
|
||||
end
|
||||
|
||||
# Public: The url_path, but CGI escaped.
|
||||
#
|
||||
# Returns the String url_path
|
||||
def escaped_url_path
|
||||
CGI.escape(self.url_path).gsub('%2F','/')
|
||||
end
|
||||
|
||||
# Public: The on-disk filename of the file.
|
||||
#
|
||||
# Returns the String name.
|
||||
def name
|
||||
@blob && @blob.name
|
||||
end
|
||||
alias filename name
|
||||
|
||||
# Public: The raw contents of the page.
|
||||
#
|
||||
# Returns the String data.
|
||||
def raw_data
|
||||
return nil unless @blob
|
||||
|
||||
if !@wiki.repo.bare && @blob.is_symlink
|
||||
new_path = @blob.symlink_target(::File.join(@wiki.repo.path, '..', self.path))
|
||||
return IO.read(new_path) if new_path
|
||||
end
|
||||
|
||||
@blob.data
|
||||
end
|
||||
|
||||
# Public: The Grit::Commit version of the file.
|
||||
attr_accessor :version
|
||||
|
||||
# Public: The String path of the file.
|
||||
attr_reader :path
|
||||
|
||||
# Public: The String mime type of the file.
|
||||
def mime_type
|
||||
@blob.mime_type
|
||||
end
|
||||
|
||||
# Populate the File with information from the Blob.
|
||||
#
|
||||
# blob - The Grit::Blob that contains the info.
|
||||
# path - The String directory path of the file.
|
||||
#
|
||||
# Returns the populated Gollum::File.
|
||||
def populate(blob, path=nil)
|
||||
@blob = blob
|
||||
@path = "#{path}/#{blob.name}"[1..-1]
|
||||
self
|
||||
end
|
||||
|
||||
#########################################################################
|
||||
#
|
||||
# Internal Methods
|
||||
#
|
||||
#########################################################################
|
||||
|
||||
# Find a file in the given Gollum repo.
|
||||
#
|
||||
# name - The full String path.
|
||||
# version - The String version ID to find.
|
||||
#
|
||||
# Returns a Gollum::File or nil if the file could not be found.
|
||||
def find(name, version)
|
||||
checked = name.downcase
|
||||
map = @wiki.tree_map_for(version)
|
||||
if entry = map.detect { |entry| entry.path.downcase == checked }
|
||||
@path = name
|
||||
@blob = entry.blob(@wiki.repo)
|
||||
@version = version.is_a?(Grit::Commit) ? version : @wiki.commit_for(version)
|
||||
self
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,155 +0,0 @@
|
||||
# ~*~ encoding: utf-8 ~*~
|
||||
module Gollum
|
||||
=begin
|
||||
FileView requires that:
|
||||
- All files in root dir are processed first
|
||||
- Then all the folders are sorted and processed
|
||||
=end
|
||||
class FileView
|
||||
# common use cases:
|
||||
# set pages to wiki.pages and show_all to false
|
||||
# set pages to wiki.pages + wiki.files and show_all to true
|
||||
def initialize pages, options = {}
|
||||
@pages = pages
|
||||
@show_all = options[:show_all] || false
|
||||
@checked = options[:collapse_tree] ? '' : "checked"
|
||||
end
|
||||
|
||||
def enclose_tree string
|
||||
%Q(<ol class="tree">\n) + string + %Q(</ol>)
|
||||
end
|
||||
|
||||
def new_page page
|
||||
name = page.name
|
||||
url = url_for_page page
|
||||
%Q( <li class="file"><a href="#{url}"><span class="icon"></span>#{name}</a></li>)
|
||||
end
|
||||
|
||||
def new_folder folder_path
|
||||
new_sub_folder folder_path
|
||||
end
|
||||
|
||||
def new_sub_folder path
|
||||
<<-HTML
|
||||
<li>
|
||||
<label>#{path}</label> <input type="checkbox" #{@checked} />
|
||||
<ol>
|
||||
HTML
|
||||
end
|
||||
|
||||
def end_folder
|
||||
"</ol></li>\n"
|
||||
end
|
||||
|
||||
def url_for_page page
|
||||
url = ''
|
||||
if @show_all
|
||||
# Remove ext for valid pages.
|
||||
filename = page.filename
|
||||
filename = Page::valid_page_name?(filename) ? filename.chomp(::File.extname(filename)) : filename
|
||||
|
||||
url = ::File.join(::File.dirname(page.path), filename)
|
||||
else
|
||||
url = ::File.join(::File.dirname(page.path), page.filename_stripped)
|
||||
end
|
||||
url = url[2..-1] if url[0,2] == './'
|
||||
url
|
||||
end
|
||||
|
||||
def render_files
|
||||
html = ''
|
||||
count = @pages.size
|
||||
folder_start = -1
|
||||
|
||||
# Process all pages until folders start
|
||||
count.times do | index |
|
||||
page = @pages[ index ]
|
||||
path = page.path
|
||||
|
||||
unless path.include? '/'
|
||||
# Page processed (not contained in a folder)
|
||||
html += new_page page
|
||||
else
|
||||
# Folders start at the next index
|
||||
folder_start = index
|
||||
break # Pages finished, move on to folders
|
||||
end
|
||||
end
|
||||
|
||||
# If there are no folders, then we're done.
|
||||
return enclose_tree(html) if folder_start <= -1
|
||||
|
||||
# Handle special case of only one folder.
|
||||
if (count - folder_start == 1)
|
||||
page = @pages[ folder_start ]
|
||||
html += <<-HTML
|
||||
<li>
|
||||
<label>#{::File.dirname(page.path)}</label> <input type="checkbox" #{@checked} />
|
||||
<ol>
|
||||
#{new_page page}
|
||||
</ol>
|
||||
</li>
|
||||
HTML
|
||||
|
||||
return enclose_tree html
|
||||
end
|
||||
|
||||
sorted_folders = []
|
||||
(folder_start).upto count - 1 do | index |
|
||||
sorted_folders += [[ @pages[ index ].path, index ]]
|
||||
end
|
||||
|
||||
# http://stackoverflow.com/questions/3482814/sorting-list-of-string-paths-in-vb-net
|
||||
sorted_folders.sort! do |first,second|
|
||||
a = first[0]
|
||||
b = second[0]
|
||||
|
||||
# use :: operator because gollum defines its own conflicting File class
|
||||
dir_compare = ::File.dirname(a) <=> ::File.dirname(b)
|
||||
|
||||
# Sort based on directory name unless they're equal (0) in
|
||||
# which case sort based on file name.
|
||||
if dir_compare == 0
|
||||
::File.basename(a) <=> ::File.basename(b)
|
||||
else
|
||||
dir_compare
|
||||
end
|
||||
end
|
||||
|
||||
# keep track of folder depth, 0 = at root.
|
||||
cwd_array = []
|
||||
changed = false
|
||||
|
||||
# process rest of folders
|
||||
(0...sorted_folders.size).each do | index |
|
||||
page = @pages[ sorted_folders[ index ][ 1 ] ]
|
||||
path = page.path
|
||||
folder = ::File.dirname path
|
||||
|
||||
tmp_array = folder.split '/'
|
||||
|
||||
(0...tmp_array.size).each do | index |
|
||||
if cwd_array[ index ].nil? || changed
|
||||
html += new_sub_folder tmp_array[ index ]
|
||||
next
|
||||
end
|
||||
|
||||
if cwd_array[ index ] != tmp_array[ index ]
|
||||
changed = true
|
||||
(cwd_array.size - index).times do
|
||||
html += end_folder
|
||||
end
|
||||
html += new_sub_folder tmp_array[ index ]
|
||||
end
|
||||
end
|
||||
|
||||
html += new_page page
|
||||
cwd_array = tmp_array
|
||||
changed = false
|
||||
end
|
||||
|
||||
# return the completed html
|
||||
enclose_tree html
|
||||
end # end render_files
|
||||
end # end FileView class
|
||||
end # end Gollum module
|
||||
@@ -1,249 +0,0 @@
|
||||
# ~*~ encoding: utf-8 ~*~
|
||||
module Gollum
|
||||
# Controls all access to the Git objects from Gollum. Extend this class to
|
||||
# add custom caching for special cases.
|
||||
class GitAccess
|
||||
# Initializes the GitAccess instance.
|
||||
#
|
||||
# path - The String path to the Git repository that holds the
|
||||
# Gollum site.
|
||||
# page_file_dir - String the directory in which all page files reside
|
||||
#
|
||||
# Returns this instance.
|
||||
def initialize(path, page_file_dir = nil, bare = false)
|
||||
@page_file_dir = page_file_dir
|
||||
@path = path
|
||||
@repo = Grit::Repo.new(path, { :is_bare => bare })
|
||||
clear
|
||||
end
|
||||
|
||||
# Public: Determines whether the Git repository exists on disk.
|
||||
#
|
||||
# Returns true if it exists, or false.
|
||||
def exist?
|
||||
@repo.git.exist?
|
||||
end
|
||||
|
||||
# Public: Converts a given Git reference to a SHA, using the cache if
|
||||
# available.
|
||||
#
|
||||
# ref - a String Git reference (ex: "master")
|
||||
#
|
||||
# Returns a String, or nil if the ref isn't found.
|
||||
def ref_to_sha(ref)
|
||||
ref = ref.to_s
|
||||
return if ref.empty?
|
||||
sha =
|
||||
if sha?(ref)
|
||||
ref
|
||||
else
|
||||
get_cache(:ref, ref) { ref_to_sha!(ref) }
|
||||
end.to_s
|
||||
sha.empty? ? nil : sha
|
||||
end
|
||||
|
||||
# Public: Gets a recursive list of Git blobs for the whole tree at the
|
||||
# given commit.
|
||||
#
|
||||
# ref - A String Git reference or Git SHA to a commit.
|
||||
#
|
||||
# Returns an Array of BlobEntry instances.
|
||||
def tree(ref)
|
||||
if sha = ref_to_sha(ref)
|
||||
get_cache(:tree, sha) { tree!(sha) }
|
||||
else
|
||||
[]
|
||||
end
|
||||
end
|
||||
|
||||
# Public: Fetches the contents of the Git blob at the given SHA.
|
||||
#
|
||||
# sha - A String Git SHA.
|
||||
#
|
||||
# Returns the String content of the blob.
|
||||
def blob(sha)
|
||||
cat_file!(sha)
|
||||
end
|
||||
|
||||
# Public: Looks up the Git commit using the given Git SHA or ref.
|
||||
#
|
||||
# ref - A String Git SHA or ref.
|
||||
#
|
||||
# Returns a Grit::Commit.
|
||||
def commit(ref)
|
||||
if sha?(ref)
|
||||
get_cache(:commit, ref) { commit!(ref) }
|
||||
else
|
||||
if sha = get_cache(:ref, ref)
|
||||
commit(sha)
|
||||
else
|
||||
if cm = commit!(ref)
|
||||
set_cache(:ref, ref, cm.id)
|
||||
set_cache(:commit, cm.id, cm)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Public: Clears all of the cached data that this GitAccess is tracking.
|
||||
#
|
||||
# Returns nothing.
|
||||
def clear
|
||||
@ref_map = {}
|
||||
@tree_map = {}
|
||||
@commit_map = {}
|
||||
end
|
||||
|
||||
# Public: Refreshes just the cached Git reference data. This should
|
||||
# be called after every Gollum update.
|
||||
#
|
||||
# Returns nothing.
|
||||
def refresh
|
||||
@ref_map.clear
|
||||
end
|
||||
|
||||
#########################################################################
|
||||
#
|
||||
# Internal Methods
|
||||
#
|
||||
#########################################################################
|
||||
|
||||
# Gets the String path to the Git repository.
|
||||
attr_reader :path
|
||||
|
||||
# Gets the Grit::Repo instance for the Git repository.
|
||||
attr_reader :repo
|
||||
|
||||
# Gets a Hash cache of refs to commit SHAs.
|
||||
#
|
||||
# {"master" => "abc123", ...}
|
||||
#
|
||||
attr_reader :ref_map
|
||||
|
||||
# Gets a Hash cache of commit SHAs to a recursive tree of blobs.
|
||||
#
|
||||
# {"abc123" => [<BlobEntry>, <BlobEntry>]}
|
||||
#
|
||||
attr_reader :tree_map
|
||||
|
||||
# Gets a Hash cache of commit SHAs to the Grit::Commit instance.
|
||||
#
|
||||
# {"abcd123" => <Grit::Commit>}
|
||||
#
|
||||
attr_reader :commit_map
|
||||
|
||||
# Checks to see if the given String is a 40 character hex SHA.
|
||||
#
|
||||
# str - Possible String SHA.
|
||||
#
|
||||
# Returns true if the String is a SHA, or false.
|
||||
def sha?(str)
|
||||
!!(str =~ /^[0-9a-f]{40}$/)
|
||||
end
|
||||
|
||||
# Looks up the Git SHA for the given Git ref.
|
||||
#
|
||||
# ref - String Git ref.
|
||||
#
|
||||
# Returns a String SHA.
|
||||
def ref_to_sha!(ref)
|
||||
@repo.git.rev_list({:max_count=>1}, ref)
|
||||
rescue Grit::GitRuby::Repository::NoSuchShaFound
|
||||
end
|
||||
|
||||
# Looks up the Git blobs for a given commit.
|
||||
#
|
||||
# sha - String commit SHA.
|
||||
#
|
||||
# Returns an Array of BlobEntry instances.
|
||||
def tree!(sha)
|
||||
tree = @repo.git.native(:ls_tree,
|
||||
{:r => true, :l => true, :z => true}, sha)
|
||||
if tree.respond_to?(:force_encoding)
|
||||
tree.force_encoding("UTF-8")
|
||||
end
|
||||
items = tree.split("\0").inject([]) do |memo, line|
|
||||
memo << parse_tree_line(line)
|
||||
end
|
||||
|
||||
if dir = @page_file_dir
|
||||
regex = /^#{dir}\//
|
||||
items.select { |i| i.path =~ regex }
|
||||
else
|
||||
items
|
||||
end
|
||||
end
|
||||
|
||||
# Reads the content from the Git db at the given SHA.
|
||||
#
|
||||
# sha - The String SHA.
|
||||
#
|
||||
# Returns the String content of the Git object.
|
||||
def cat_file!(sha)
|
||||
@repo.git.cat_file({:p => true}, sha)
|
||||
end
|
||||
|
||||
# Reads a Git commit.
|
||||
#
|
||||
# sha - The string SHA of the Git commit.
|
||||
#
|
||||
# Returns a Grit::Commit.
|
||||
def commit!(sha)
|
||||
@repo.commit(sha)
|
||||
end
|
||||
|
||||
# Attempts to get the given data from a cache. If it doesn't exist, it'll
|
||||
# pass the results of the yielded block to the cache for future accesses.
|
||||
#
|
||||
# name - The cache prefix used in building the full cache key.
|
||||
# key - The unique cache key suffix, usually a String Git SHA.
|
||||
#
|
||||
# Yields a block to pass to the cache.
|
||||
# Returns the cached result.
|
||||
def get_cache(name, key)
|
||||
cache = instance_variable_get("@#{name}_map")
|
||||
value = cache[key]
|
||||
if value.nil? && block_given?
|
||||
set_cache(name, key, value = yield)
|
||||
end
|
||||
value == :_nil ? nil : value
|
||||
end
|
||||
|
||||
# Writes some data to the internal cache.
|
||||
#
|
||||
# name - The cache prefix used in building the full cache key.
|
||||
# key - The unique cache key suffix, usually a String Git SHA.
|
||||
# value - The value to write to the cache.
|
||||
#
|
||||
# Returns nothing.
|
||||
def set_cache(name, key, value)
|
||||
cache = instance_variable_get("@#{name}_map")
|
||||
cache[key] = value || :_nil
|
||||
end
|
||||
|
||||
# Parses a line of output from the `ls-tree` command.
|
||||
#
|
||||
# line - A String line of output:
|
||||
# "100644 blob 839c2291b30495b9a882c17d08254d3c90d8fb53 Home.md"
|
||||
#
|
||||
# Returns an Array of BlobEntry instances.
|
||||
def parse_tree_line(line)
|
||||
mode, type, sha, size, *name = line.split(/\s+/)
|
||||
BlobEntry.new(sha, name.join(' '), size.to_i, mode.to_i(8))
|
||||
end
|
||||
|
||||
# Decode octal sequences (\NNN) in tree path names.
|
||||
#
|
||||
# path - String path name.
|
||||
#
|
||||
# Returns a decoded String.
|
||||
def decode_git_path(path)
|
||||
if path[0] == ?" && path[-1] == ?"
|
||||
path = path[1...-1]
|
||||
path.gsub!(/\\\d{3}/) { |m| m[1..-1].to_i(8).chr }
|
||||
end
|
||||
path.gsub!(/\\[rn"\\]/) { |m| eval(%("#{m.to_s}")) }
|
||||
path
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,48 +0,0 @@
|
||||
# ~*~ encoding: utf-8 ~*~
|
||||
require 'net/http'
|
||||
require 'net/https' # ruby 1.8.7 fix, remove at upgrade
|
||||
require 'uri'
|
||||
require 'open-uri'
|
||||
|
||||
module Gollum
|
||||
class Gitcode
|
||||
def initialize path
|
||||
raise(ArgumentError, 'path is nil or empty') if path.nil? or path.empty?
|
||||
|
||||
@uri = URI::HTTP.build({
|
||||
:path => self.unchomp(path),
|
||||
:host => 'raw.github.com',
|
||||
:scheme => 'https',
|
||||
:port => 443 })
|
||||
end
|
||||
|
||||
def contents
|
||||
@contents ||= self.req @uri
|
||||
end
|
||||
|
||||
def unchomp p
|
||||
return p if p.nil?
|
||||
p[0] == '/' ? p : ('/' + p)
|
||||
end
|
||||
|
||||
def req uri, cut = 1
|
||||
return "Too many redirects or retries" if cut >= 10
|
||||
http = Net::HTTP.new uri.host, uri.port
|
||||
http.use_ssl = true
|
||||
resp = http.get uri.path, {
|
||||
'Accept' => 'text/plain',
|
||||
'Cache-Control' => 'no-cache',
|
||||
'Connection' => 'keep-alive',
|
||||
'Host' => uri.host,
|
||||
'User-Agent' => 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:15.0) Gecko/20100101 Firefox/15.0'
|
||||
}
|
||||
code = resp.code.to_i
|
||||
return resp.body if code == 200
|
||||
return "Not Found" if code == 404
|
||||
return "Unhandled Response Code #{code}" unless code == 304 or not resp.header['location'].nil?
|
||||
loc = URI.parse resp.header['location']
|
||||
uri2 = loc.relative?() ? (uri + loc) : loc # overloads (+)
|
||||
return req uri2, (cut + 1)
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,20 +0,0 @@
|
||||
# ~*~ encoding: utf-8 ~*~
|
||||
|
||||
module Grit
|
||||
class Blob
|
||||
def is_symlink
|
||||
self.mode == 0120000
|
||||
end
|
||||
|
||||
def symlink_target(base_path = nil)
|
||||
target = self.data
|
||||
new_path = File.expand_path(File.join('..', target), base_path)
|
||||
|
||||
if File.file? new_path
|
||||
return new_path
|
||||
end
|
||||
end
|
||||
|
||||
nil
|
||||
end
|
||||
end
|
||||
@@ -32,11 +32,5 @@ module Precious
|
||||
url.gsub('%2F','/').gsub(/^\/+/,'').gsub('//','/')
|
||||
end
|
||||
|
||||
def trim_leading_slash url
|
||||
return url if url.nil?
|
||||
url.gsub!('%2F','/')
|
||||
return '/' + url.gsub(/^\/+/,'') if url[0,1] == '/'
|
||||
url
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,688 +0,0 @@
|
||||
# ~*~ encoding: utf-8 ~*~
|
||||
require 'digest/sha1'
|
||||
require 'cgi'
|
||||
require 'pygments'
|
||||
require 'base64'
|
||||
|
||||
require File.expand_path '../frontend/helpers', __FILE__
|
||||
require File.expand_path '../gitcode', __FILE__
|
||||
|
||||
# initialize Pygments
|
||||
Pygments.start
|
||||
|
||||
module Gollum
|
||||
|
||||
class Markup
|
||||
include Precious::Helpers
|
||||
|
||||
@formats = {}
|
||||
|
||||
class << self
|
||||
attr_reader :formats
|
||||
|
||||
# Register a file extension and associated markup type
|
||||
#
|
||||
# ext - The file extension
|
||||
# name - The name of the markup type
|
||||
# options - Hash of options:
|
||||
# regexp - Regexp to match against.
|
||||
# Defaults to exact match of ext.
|
||||
#
|
||||
# If given a block, that block will be registered with GitHub::Markup to
|
||||
# render any matching pages
|
||||
def register(ext, name, options = {}, &block)
|
||||
regexp = options[:regexp] || Regexp.new(ext.to_s)
|
||||
@formats[ext] = { :name => name, :regexp => regexp }
|
||||
GitHub::Markup.add_markup(regexp, &block) if block_given?
|
||||
end
|
||||
end
|
||||
|
||||
attr_accessor :toc
|
||||
attr_reader :metadata
|
||||
|
||||
# Initialize a new Markup object.
|
||||
#
|
||||
# page - The Gollum::Page.
|
||||
#
|
||||
# Returns a new Gollum::Markup object, ready for rendering.
|
||||
def initialize(page)
|
||||
@wiki = page.wiki
|
||||
@name = page.filename
|
||||
@data = page.text_data
|
||||
@version = page.version.id if page.version
|
||||
@format = page.format
|
||||
@sub_page = page.sub_page
|
||||
@parent_page = page.parent_page
|
||||
@dir = ::File.dirname(page.path)
|
||||
@tagmap = {}
|
||||
@codemap = {}
|
||||
@wsdmap = {}
|
||||
@premap = {}
|
||||
@toc = nil
|
||||
@metadata = nil
|
||||
@to_xml = { :save_with => Nokogiri::XML::Node::SaveOptions::DEFAULT_XHTML ^ 1, :indent => 0, :encoding => 'UTF-8' }
|
||||
end
|
||||
|
||||
# Render the content with Gollum wiki syntax on top of the file's own
|
||||
# markup language.
|
||||
#
|
||||
# no_follow - Boolean that determines if rel="nofollow" is added to all
|
||||
# <a> tags.
|
||||
# encoding - Encoding Constant or String.
|
||||
#
|
||||
# Returns the formatted String content.
|
||||
def render(no_follow = false, encoding = nil)
|
||||
sanitize = no_follow ?
|
||||
@wiki.history_sanitizer :
|
||||
@wiki.sanitizer
|
||||
|
||||
data = @data.dup
|
||||
data = extract_metadata(data)
|
||||
data = extract_gitcode(data)
|
||||
data = extract_code(data)
|
||||
data = extract_wsd(data)
|
||||
data = extract_tags(data)
|
||||
begin
|
||||
data = GitHub::Markup.render(@name, data)
|
||||
if data.nil?
|
||||
raise "There was an error converting #{@name} to HTML."
|
||||
end
|
||||
rescue Object => e
|
||||
data = %{<p class="gollum-error">#{e.message}</p>}
|
||||
end
|
||||
data = process_tags(data)
|
||||
data = process_code(data, encoding)
|
||||
|
||||
doc = Nokogiri::HTML::DocumentFragment.parse(data)
|
||||
doc = sanitize.clean_node!(doc) if sanitize
|
||||
doc,toc = process_headers(doc)
|
||||
@toc = @sub_page ? ( @parent_page ? @parent_page.toc_data : "[[_TOC_]]" ) : toc
|
||||
yield doc if block_given?
|
||||
# nokogiri's save options are ored together. FORMAT has a value of 1 so ^ 1 removes it.
|
||||
# formatting will create extra spaces in pre tags.
|
||||
# https://github.com/sparklemotion/nokogiri/issues/782
|
||||
# DEFAULT_HTML encodes unicode so XHTML is used for proper unicode support in href.
|
||||
data = doc.to_xml( @to_xml )
|
||||
|
||||
data = process_toc_tags(data)
|
||||
data = process_wsd(data)
|
||||
data.gsub!(/<p><\/p>/) do
|
||||
''
|
||||
end
|
||||
|
||||
data
|
||||
end
|
||||
|
||||
# Inserts header anchors and creates TOC
|
||||
#
|
||||
# doc - Nokogiri parsed document
|
||||
#
|
||||
# Returns doc Document and toc String
|
||||
def process_headers(doc)
|
||||
toc = nil
|
||||
doc.css('h1,h2,h3,h4,h5,h6').each do |h|
|
||||
# must escape "
|
||||
h_name = h.content.gsub(' ','-').gsub('"','%22')
|
||||
|
||||
level = h.name.gsub(/[hH]/,'').to_i
|
||||
|
||||
# Add anchors
|
||||
h.add_child(%Q{<a class="anchor" id="#{h_name}" href="##{h_name}"></a>})
|
||||
|
||||
# Build TOC
|
||||
toc ||= Nokogiri::XML::DocumentFragment.parse('<div class="toc"><div class="toc-title">Table of Contents</div></div>')
|
||||
tail ||= toc.child
|
||||
tail_level ||= 0
|
||||
|
||||
while tail_level < level
|
||||
node = Nokogiri::XML::Node.new('ul', doc)
|
||||
tail = tail.add_child(node)
|
||||
tail_level += 1
|
||||
end
|
||||
while tail_level > level
|
||||
tail = tail.parent
|
||||
tail_level -= 1
|
||||
end
|
||||
node = Nokogiri::XML::Node.new('li', doc)
|
||||
# % -> %25 so anchors work on Firefox. See issue #475
|
||||
node.add_child(%Q{<a href="##{h_name}">#{h.content}</a>})
|
||||
tail.add_child(node)
|
||||
end
|
||||
toc = toc.to_xml(@to_xml) if toc != nil
|
||||
[doc, toc]
|
||||
end
|
||||
|
||||
#########################################################################
|
||||
#
|
||||
# Tags
|
||||
#
|
||||
#########################################################################
|
||||
|
||||
# Extract all tags into the tagmap and replace with placeholders.
|
||||
#
|
||||
# data - The raw String data.
|
||||
#
|
||||
# Returns the placeholder'd String data.
|
||||
def extract_tags(data)
|
||||
if @format == :asciidoc
|
||||
return data
|
||||
end
|
||||
data.gsub!(/(.?)\[\[(.+?)\]\]([^\[]?)/m) do
|
||||
if $1 == "'" && $3 != "'"
|
||||
"[[#{$2}]]#{$3}"
|
||||
elsif $2.include?('][')
|
||||
if $2[0..4] == 'file:'
|
||||
pre = $1
|
||||
post = $3
|
||||
parts = $2.split('][')
|
||||
parts[0][0..4] = ""
|
||||
link = "#{parts[1]}|#{parts[0].sub(/\.org/,'')}"
|
||||
id = Digest::SHA1.hexdigest(link)
|
||||
@tagmap[id] = link
|
||||
"#{pre}#{id}#{post}"
|
||||
else
|
||||
$&
|
||||
end
|
||||
else
|
||||
id = Digest::SHA1.hexdigest($2)
|
||||
@tagmap[id] = $2
|
||||
"#{$1}#{id}#{$3}"
|
||||
end
|
||||
end
|
||||
data
|
||||
end
|
||||
|
||||
# Process all tags from the tagmap and replace the placeholders with the
|
||||
# final markup.
|
||||
#
|
||||
# data - The String data (with placeholders).
|
||||
#
|
||||
# Returns the marked up String data.
|
||||
def process_tags(data)
|
||||
@tagmap.each do |id, tag|
|
||||
# If it's preformatted, just put the tag back
|
||||
if is_preformatted?(data, id)
|
||||
data.gsub!(id) do
|
||||
"[[#{tag}]]"
|
||||
end
|
||||
else
|
||||
data.gsub!(id) do
|
||||
process_tag(tag).gsub('%2F', '/')
|
||||
end
|
||||
end
|
||||
end
|
||||
data
|
||||
end
|
||||
|
||||
# Find `id` within `data` and determine if it's within
|
||||
# preformatted tags.
|
||||
#
|
||||
# data - The String data (with placeholders).
|
||||
# id - The String SHA1 hash.
|
||||
PREFORMATTED_TAGS = %w(code tt)
|
||||
def is_preformatted?(data, id)
|
||||
doc = Nokogiri::HTML::DocumentFragment.parse(data)
|
||||
node = doc.search("[text()*='#{id}']").first
|
||||
node && (PREFORMATTED_TAGS.include?(node.name) ||
|
||||
node.ancestors.any? { |a| PREFORMATTED_TAGS.include?(a.name) })
|
||||
end
|
||||
|
||||
# Process a single tag into its final HTML form.
|
||||
#
|
||||
# tag - The String tag contents (the stuff inside the double
|
||||
# brackets).
|
||||
#
|
||||
# Returns the String HTML version of the tag.
|
||||
def process_tag(tag)
|
||||
if tag =~ /^_TOC_$/
|
||||
%{[[#{tag}]]}
|
||||
elsif tag =~ /^_$/
|
||||
%{<div class="clearfloats"></div>}
|
||||
elsif html = process_image_tag(tag)
|
||||
html
|
||||
elsif html = process_file_link_tag(tag)
|
||||
html
|
||||
else
|
||||
process_page_link_tag(tag)
|
||||
end
|
||||
end
|
||||
|
||||
# Attempt to process the tag as an image tag.
|
||||
#
|
||||
# tag - The String tag contents (the stuff inside the double brackets).
|
||||
#
|
||||
# Returns the String HTML if the tag is a valid image tag or nil
|
||||
# if it is not.
|
||||
def process_image_tag(tag)
|
||||
parts = tag.split('|')
|
||||
return if parts.size.zero?
|
||||
|
||||
name = parts[0].strip
|
||||
path = if file = find_file(name)
|
||||
::File.join @wiki.base_path, file.path
|
||||
elsif name =~ /^https?:\/\/.+(jpg|png|gif|svg|bmp)$/i
|
||||
name
|
||||
end
|
||||
|
||||
if path
|
||||
opts = parse_image_tag_options(tag)
|
||||
|
||||
containered = false
|
||||
|
||||
classes = [] # applied to whatever the outermost container is
|
||||
attrs = [] # applied to the image
|
||||
|
||||
align = opts['align']
|
||||
if opts['float']
|
||||
containered = true
|
||||
align ||= 'left'
|
||||
if %w{left right}.include?(align)
|
||||
classes << "float-#{align}"
|
||||
end
|
||||
elsif %w{top texttop middle absmiddle bottom absbottom baseline}.include?(align)
|
||||
attrs << %{align="#{align}"}
|
||||
elsif align
|
||||
if %w{left center right}.include?(align)
|
||||
containered = true
|
||||
classes << "align-#{align}"
|
||||
end
|
||||
end
|
||||
|
||||
if width = opts['width']
|
||||
if width =~ /^\d+(\.\d+)?(em|px)$/
|
||||
attrs << %{width="#{width}"}
|
||||
end
|
||||
end
|
||||
|
||||
if height = opts['height']
|
||||
if height =~ /^\d+(\.\d+)?(em|px)$/
|
||||
attrs << %{height="#{height}"}
|
||||
end
|
||||
end
|
||||
|
||||
if alt = opts['alt']
|
||||
attrs << %{alt="#{alt}"}
|
||||
end
|
||||
|
||||
attr_string = attrs.size > 0 ? attrs.join(' ') + ' ' : ''
|
||||
|
||||
if opts['frame'] || containered
|
||||
classes << 'frame' if opts['frame']
|
||||
%{<span class="#{classes.join(' ')}">} +
|
||||
%{<span>} +
|
||||
%{<img src="#{path}" #{attr_string}/>} +
|
||||
(alt ? %{<span>#{alt}</span>} : '') +
|
||||
%{</span>} +
|
||||
%{</span>}
|
||||
else
|
||||
%{<img src="#{path}" #{attr_string}/>}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Parse any options present on the image tag and extract them into a
|
||||
# Hash of option names and values.
|
||||
#
|
||||
# tag - The String tag contents (the stuff inside the double brackets).
|
||||
#
|
||||
# Returns the options Hash:
|
||||
# key - The String option name.
|
||||
# val - The String option value or true if it is a binary option.
|
||||
def parse_image_tag_options(tag)
|
||||
tag.split('|')[1..-1].inject({}) do |memo, attr|
|
||||
parts = attr.split('=').map { |x| x.strip }
|
||||
memo[parts[0]] = (parts.size == 1 ? true : parts[1])
|
||||
memo
|
||||
end
|
||||
end
|
||||
|
||||
# Attempt to process the tag as a file link tag.
|
||||
#
|
||||
# tag - The String tag contents (the stuff inside the double
|
||||
# brackets).
|
||||
#
|
||||
# Returns the String HTML if the tag is a valid file link tag or nil
|
||||
# if it is not.
|
||||
def process_file_link_tag(tag)
|
||||
parts = tag.split('|')
|
||||
return if parts.size.zero?
|
||||
|
||||
name = parts[0].strip
|
||||
path = parts[1] && parts[1].strip
|
||||
path = if path && file = find_file(path)
|
||||
::File.join @wiki.base_path, file.path
|
||||
elsif path =~ %r{^https?://}
|
||||
path
|
||||
else
|
||||
nil
|
||||
end
|
||||
|
||||
if name && path && file
|
||||
%{<a href="#{::File.join @wiki.base_path, file.path}">#{name}</a>}
|
||||
elsif name && path
|
||||
%{<a href="#{path}">#{name}</a>}
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
# Attempt to process the tag as a page link tag.
|
||||
#
|
||||
# tag - The String tag contents (the stuff inside the double
|
||||
# brackets).
|
||||
#
|
||||
# Returns the String HTML if the tag is a valid page link tag or nil
|
||||
# if it is not.
|
||||
def process_page_link_tag(tag)
|
||||
parts = tag.split('|')
|
||||
parts.reverse! if @format == :mediawiki
|
||||
|
||||
name, page_name = *parts.compact.map(&:strip)
|
||||
cname = @wiki.page_class.cname(page_name || name)
|
||||
|
||||
if name =~ %r{^https?://} && page_name.nil?
|
||||
%{<a href="#{name}">#{name}</a>}
|
||||
else
|
||||
presence = "absent"
|
||||
link_name = cname
|
||||
page, extra = find_page_from_name(cname)
|
||||
if page
|
||||
link_name = @wiki.page_class.cname(page.name)
|
||||
presence = "present"
|
||||
end
|
||||
link = ::File.join(@wiki.base_path, page ? page.escaped_url_path : CGI.escape(link_name))
|
||||
|
||||
# //page is invalid
|
||||
# strip all duplicate forward slashes using helpers.rb trim_leading_slash
|
||||
# //page => /page
|
||||
link = trim_leading_slash link
|
||||
|
||||
%{<a class="internal #{presence}" href="#{link}#{extra}">#{name}</a>}
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
# Process the special table of contents tag [[_TOC_]]
|
||||
#
|
||||
# data - The String data (with placeholders).
|
||||
#
|
||||
# Returns the marked up String data.
|
||||
def process_toc_tags(data)
|
||||
data.gsub!("[[_TOC_]]") do
|
||||
@toc.nil? ? '' : @toc
|
||||
end
|
||||
data
|
||||
end
|
||||
|
||||
# Find the given file in the repo.
|
||||
#
|
||||
# name - The String absolute or relative path of the file.
|
||||
#
|
||||
# Returns the Gollum::File or nil if none was found.
|
||||
def find_file(name, version=@version)
|
||||
if name =~ /^\//
|
||||
@wiki.file(name[1..-1], version)
|
||||
else
|
||||
path = @dir == '.' ? name : ::File.join(@dir, name)
|
||||
@wiki.file(path, version)
|
||||
end
|
||||
end
|
||||
|
||||
# Find a page from a given cname. If the page has an anchor (#) and has
|
||||
# no match, strip the anchor and try again.
|
||||
#
|
||||
# cname - The String canonical page name including path.
|
||||
#
|
||||
# Returns a Gollum::Page instance if a page is found, or an Array of
|
||||
# [Gollum::Page, String extra] if a page without the extra anchor data
|
||||
# is found.
|
||||
def find_page_from_name(cname)
|
||||
slash = cname.rindex('/')
|
||||
|
||||
unless slash.nil?
|
||||
name = cname[slash+1..-1]
|
||||
path = cname[0..slash]
|
||||
page = @wiki.paged(name, path)
|
||||
else
|
||||
page = @wiki.paged(cname, '/') || @wiki.page(cname)
|
||||
end
|
||||
|
||||
if page
|
||||
return page
|
||||
end
|
||||
if pos = cname.index('#')
|
||||
[@wiki.page(cname[0...pos]), cname[pos..-1]]
|
||||
end
|
||||
end
|
||||
|
||||
#########################################################################
|
||||
#
|
||||
# Gitcode - fetch code from github search path and replace the contents
|
||||
# to a code-block that gets run the next parse.
|
||||
# Acceptable formats:
|
||||
# ```language:local-file.ext```
|
||||
# ```language:/abs/other-file.ext```
|
||||
# ```language:github:gollum/gollum/master/somefile.txt```
|
||||
#
|
||||
#########################################################################
|
||||
|
||||
def extract_gitcode data
|
||||
data.gsub /^[ \t]*``` ?([^:\n\r]+):(?:(github:))?([^`\n\r]+)```/ do
|
||||
contents = ''
|
||||
# Use empty string if $2 is nil.
|
||||
uri = $3 || ''
|
||||
# Detect local file.
|
||||
if $2.nil?
|
||||
if file = self.find_file(uri, @wiki.ref)
|
||||
contents = file.raw_data
|
||||
else
|
||||
# How do we communicate a render error?
|
||||
next "File not found: #{Rack::Utils::escape_html(uri)}"
|
||||
end
|
||||
else
|
||||
contents = Gollum::Gitcode.new(uri).contents
|
||||
end
|
||||
|
||||
"```#{$1}\n#{contents}\n```\n"
|
||||
end
|
||||
end
|
||||
|
||||
#########################################################################
|
||||
#
|
||||
# Code
|
||||
#
|
||||
#########################################################################
|
||||
|
||||
# Extract all code blocks into the codemap and replace with placeholders.
|
||||
#
|
||||
# data - The raw String data.
|
||||
#
|
||||
# Returns the placeholder'd String data.
|
||||
def extract_code(data)
|
||||
data.gsub!(/^([ \t]*)(~~~+) ?([^\r\n]+)?\r?\n(.+?)\r?\n\1(~~~+)[ \t\r]*$/m) do
|
||||
m_indent = $1
|
||||
m_start = $2 # ~~~
|
||||
m_lang = $3
|
||||
m_code = $4
|
||||
m_end = $5 # ~~~
|
||||
|
||||
# start and finish tilde fence must be the same length
|
||||
return '' if m_start.length != m_end.length
|
||||
|
||||
lang = m_lang ? m_lang.strip : nil
|
||||
id = Digest::SHA1.hexdigest("#{lang}.#{m_code}")
|
||||
cached = check_cache(:code, id)
|
||||
|
||||
# extract lang from { .ruby } or { #stuff .ruby .indent }
|
||||
# see http://johnmacfarlane.net/pandoc/README.html#delimited-code-blocks
|
||||
|
||||
if lang
|
||||
lang = lang.match(/\.([^}\s]+)/)
|
||||
lang = lang[1] unless lang.nil?
|
||||
end
|
||||
|
||||
@codemap[id] = cached ?
|
||||
{ :output => cached } :
|
||||
{ :lang => lang, :code => m_code, :indent => m_indent }
|
||||
|
||||
"#{m_indent}#{id}" # print the SHA1 ID with the proper indentation
|
||||
end
|
||||
|
||||
data.gsub!(/^([ \t]*)``` ?([^\r\n]+)?\r?\n(.+?)\r?\n\1```[ \t]*\r?$/m) do
|
||||
lang = $2 ? $2.strip : nil
|
||||
id = Digest::SHA1.hexdigest("#{lang}.#{$3}")
|
||||
cached = check_cache(:code, id)
|
||||
@codemap[id] = cached ?
|
||||
{ :output => cached } :
|
||||
{ :lang => lang, :code => $3, :indent => $1 }
|
||||
"#{$1}#{id}" # print the SHA1 ID with the proper indentation
|
||||
end
|
||||
data
|
||||
end
|
||||
|
||||
# Remove the leading space from a code block. Leading space
|
||||
# is only removed if every single line in the block has leading
|
||||
# whitespace.
|
||||
#
|
||||
# code - The code block to remove spaces from
|
||||
# regex - A regex to match whitespace
|
||||
def remove_leading_space(code, regex)
|
||||
if code.lines.all? { |line| line =~ /\A\r?\n\Z/ || line =~ regex }
|
||||
code.gsub!(regex) do
|
||||
''
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Process all code from the codemap and replace the placeholders with the
|
||||
# final HTML.
|
||||
#
|
||||
# data - The String data (with placeholders).
|
||||
# encoding - Encoding Constant or String.
|
||||
#
|
||||
# Returns the marked up String data.
|
||||
def process_code(data, encoding = nil)
|
||||
return data if data.nil? || data.size.zero? || @codemap.size.zero?
|
||||
|
||||
blocks = []
|
||||
@codemap.each do |id, spec|
|
||||
next if spec[:output] # cached
|
||||
|
||||
code = spec[:code]
|
||||
|
||||
remove_leading_space(code, /^#{spec[:indent]}/m)
|
||||
remove_leading_space(code, /^( |\t)/m)
|
||||
|
||||
blocks << [spec[:lang], code]
|
||||
end
|
||||
|
||||
highlighted = []
|
||||
blocks.each do |lang, code|
|
||||
encoding ||= 'utf-8'
|
||||
begin
|
||||
# must set startinline to true for php to be highlighted without <?
|
||||
# http://pygments.org/docs/lexers/
|
||||
hl_code = Pygments.highlight(code, :lexer => lang, :options => {:encoding => encoding.to_s, :startinline => true})
|
||||
rescue
|
||||
hl_code = code
|
||||
end
|
||||
highlighted << hl_code
|
||||
end
|
||||
|
||||
@codemap.each do |id, spec|
|
||||
body = spec[:output] || begin
|
||||
if (body = highlighted.shift.to_s).size > 0
|
||||
update_cache(:code, id, body)
|
||||
body
|
||||
else
|
||||
"<pre><code>#{CGI.escapeHTML(spec[:code])}</code></pre>"
|
||||
end
|
||||
end
|
||||
data.gsub!(id) do
|
||||
body
|
||||
end
|
||||
end
|
||||
|
||||
data
|
||||
end
|
||||
|
||||
#########################################################################
|
||||
#
|
||||
# Sequence Diagrams
|
||||
#
|
||||
#########################################################################
|
||||
|
||||
# Extract all sequence diagram blocks into the wsdmap and replace with
|
||||
# placeholders.
|
||||
#
|
||||
# data - The raw String data.
|
||||
#
|
||||
# Returns the placeholder'd String data.
|
||||
def extract_wsd(data)
|
||||
data.gsub(/^\{\{\{\{\{\{ ?(.+?)\r?\n(.+?)\r?\n\}\}\}\}\}\}\r?$/m) do
|
||||
id = Digest::SHA1.hexdigest($2)
|
||||
@wsdmap[id] = { :style => $1, :code => $2 }
|
||||
id
|
||||
end
|
||||
end
|
||||
|
||||
# Process all diagrams from the wsdmap and replace the placeholders with
|
||||
# the final HTML.
|
||||
#
|
||||
# data - The String data (with placeholders).
|
||||
#
|
||||
# Returns the marked up String data.
|
||||
def process_wsd(data)
|
||||
@wsdmap.each do |id, spec|
|
||||
style = spec[:style]
|
||||
code = spec[:code]
|
||||
data.gsub!(id) do
|
||||
Gollum::WebSequenceDiagram.new(code, style).to_tag
|
||||
end
|
||||
end
|
||||
data
|
||||
end
|
||||
|
||||
#########################################################################
|
||||
#
|
||||
# Metadata
|
||||
#
|
||||
#########################################################################
|
||||
|
||||
# Extract metadata for data and build metadata table. Metadata
|
||||
# is content found between markers, and must
|
||||
# be a valid YAML mapping.
|
||||
#
|
||||
# Because ri and ruby 1.8.7 are awesome, the markers can't
|
||||
# be included in this documentation without triggering
|
||||
# `Unhandled special: Special: type=17`
|
||||
# Please read the source code for the exact markers
|
||||
#
|
||||
# Returns the String of formatted data with metadata removed.
|
||||
def extract_metadata(data)
|
||||
@metadata = {}
|
||||
data
|
||||
end
|
||||
|
||||
# Hook for getting the formatted value of extracted tag data.
|
||||
#
|
||||
# type - Symbol value identifying what type of data is being extracted.
|
||||
# id - String SHA1 hash of original extracted tag data.
|
||||
#
|
||||
# Returns the String cached formatted data, or nil.
|
||||
def check_cache(type, id)
|
||||
end
|
||||
|
||||
# Hook for caching the formatted value of extracted tag data.
|
||||
#
|
||||
# type - Symbol value identifying what type of data is being extracted.
|
||||
# id - String SHA1 hash of original extracted tag data.
|
||||
# data - The String formatted value to be cached.
|
||||
#
|
||||
# Returns nothing.
|
||||
def update_cache(type, id, data)
|
||||
end
|
||||
end
|
||||
|
||||
MarkupGFM = Markup
|
||||
end
|
||||
@@ -1,13 +0,0 @@
|
||||
module Gollum
|
||||
class Markup
|
||||
register(:markdown, "Markdown", :regexp => /md|mkdn?|mdown|markdown/)
|
||||
register(:textile, "Textile")
|
||||
register(:rdoc, "RDoc")
|
||||
register(:org, "Org-mode")
|
||||
register(:creole, "Creole")
|
||||
register(:rest, "reStructuredText", :regexp => /re?st(\.txt)?/)
|
||||
register(:asciidoc, "AsciiDoc")
|
||||
register(:mediawiki, "MediaWiki", :regexp => /(media)?wiki/)
|
||||
register(:pod, "Pod")
|
||||
end
|
||||
end
|
||||
@@ -1,485 +0,0 @@
|
||||
# ~*~ encoding: utf-8 ~*~
|
||||
module Gollum
|
||||
class Page
|
||||
include Pagination
|
||||
|
||||
Wiki.page_class = self
|
||||
|
||||
# Sets a Boolean determing whether this page is a historical version.
|
||||
#
|
||||
# Returns nothing.
|
||||
attr_writer :historical
|
||||
|
||||
# Parent page if this is a sub page
|
||||
#
|
||||
# Returns a Page
|
||||
attr_accessor :parent_page
|
||||
|
||||
|
||||
# Checks a filename against the registered markup extensions
|
||||
#
|
||||
# filename - String filename, like "Home.md"
|
||||
#
|
||||
# Returns e.g. ["Home", :markdown], or [] if the extension is unregistered
|
||||
def self.parse_filename(filename)
|
||||
return [] unless filename =~ /^(.+)\.([a-zA-Z]\w*)$/i
|
||||
pref, ext = $1, $2
|
||||
|
||||
Gollum::Markup.formats.each_pair do |name, format|
|
||||
return [pref, name] if ext =~ format[:regexp]
|
||||
end
|
||||
[]
|
||||
end
|
||||
|
||||
# Checks if a filename has a valid, registered extension
|
||||
#
|
||||
# filename - String filename, like "Home.md".
|
||||
#
|
||||
# Returns the matching String basename of the file without the extension.
|
||||
def self.valid_filename?(filename)
|
||||
self.parse_filename(filename).first
|
||||
end
|
||||
|
||||
# Checks if a filename has a valid extension understood by GitHub::Markup.
|
||||
# Also, checks if the filename has no "_" in the front (such as
|
||||
# _Footer.md).
|
||||
#
|
||||
# filename - String filename, like "Home.md".
|
||||
#
|
||||
# Returns the matching String basename of the file without the extension.
|
||||
def self.valid_page_name?(filename)
|
||||
match = valid_filename?(filename)
|
||||
filename =~ /^_/ ? false : match
|
||||
end
|
||||
|
||||
# Public: The format of a given filename.
|
||||
#
|
||||
# filename - The String filename.
|
||||
#
|
||||
# Returns the Symbol format of the page; one of the registered format types
|
||||
def self.format_for(filename)
|
||||
self.parse_filename(filename).last
|
||||
end
|
||||
|
||||
# Reusable filter to turn a filename (without path) into a canonical name.
|
||||
# Strips extension, converts dashes to spaces.
|
||||
#
|
||||
# Returns the filtered String.
|
||||
def self.canonicalize_filename(filename)
|
||||
strip_filename(filename).gsub('-', ' ')
|
||||
end
|
||||
|
||||
# Reusable filter to strip extension and path from filename
|
||||
#
|
||||
# filename - The string path or filename to strip
|
||||
#
|
||||
# Returns the stripped String.
|
||||
def self.strip_filename(filename)
|
||||
::File.basename(filename, ::File.extname(filename))
|
||||
end
|
||||
|
||||
# Public: Initialize a page.
|
||||
#
|
||||
# wiki - The Gollum::Wiki in question.
|
||||
#
|
||||
# Returns a newly initialized Gollum::Page.
|
||||
def initialize(wiki)
|
||||
@wiki = wiki
|
||||
@blob = @header = @footer = @sidebar = nil
|
||||
@doc = nil
|
||||
@parent_page = nil
|
||||
end
|
||||
|
||||
# Public: The on-disk filename of the page including extension.
|
||||
#
|
||||
# Returns the String name.
|
||||
def filename
|
||||
@blob && @blob.name
|
||||
end
|
||||
|
||||
# Public: The on-disk filename of the page with extension stripped.
|
||||
#
|
||||
# Returns the String name.
|
||||
def filename_stripped
|
||||
self.class.strip_filename(filename)
|
||||
end
|
||||
|
||||
# Public: The canonical page name without extension, and dashes converted
|
||||
# to spaces.
|
||||
#
|
||||
# Returns the String name.
|
||||
def name
|
||||
self.class.canonicalize_filename(filename)
|
||||
end
|
||||
|
||||
# Public: The title will be constructed from the
|
||||
# filename by stripping the extension and replacing any dashes with
|
||||
# spaces.
|
||||
#
|
||||
# Returns the fully sanitized String title.
|
||||
def title
|
||||
Sanitize.clean(name).strip
|
||||
end
|
||||
|
||||
# Public: Determines if this is a sub-page
|
||||
# Sub-pages have filenames beginning with an underscore
|
||||
#
|
||||
# Returns true or false.
|
||||
def sub_page
|
||||
filename =~ /^_/
|
||||
end
|
||||
|
||||
# Public: The path of the page within the repo.
|
||||
#
|
||||
# Returns the String path.
|
||||
attr_reader :path
|
||||
|
||||
# Public: The url path required to reach this page within the repo.
|
||||
#
|
||||
# Returns the String url_path
|
||||
def url_path
|
||||
path = if self.path.include?('/')
|
||||
self.path.sub(/\/[^\/]+$/, '/')
|
||||
else
|
||||
''
|
||||
end
|
||||
|
||||
path << Page.cname(self.name, '-', '-')
|
||||
path
|
||||
end
|
||||
|
||||
# Public: Defines title for page.rb
|
||||
#
|
||||
# Returns the String title
|
||||
def url_path_title
|
||||
metadata_title || url_path.gsub("-", " ")
|
||||
end
|
||||
|
||||
# Public: Metadata title
|
||||
#
|
||||
# Set with <!-- --- title: New Title --> in page content
|
||||
#
|
||||
# Returns the String title or nil if not defined
|
||||
def metadata_title
|
||||
if metadata
|
||||
title = metadata['title']
|
||||
return title unless title.nil?
|
||||
end
|
||||
|
||||
nil
|
||||
end
|
||||
|
||||
# Public: The url_path, but CGI escaped.
|
||||
#
|
||||
# Returns the String url_path
|
||||
def escaped_url_path
|
||||
CGI.escape(self.url_path).gsub('%2F','/')
|
||||
end
|
||||
|
||||
# Public: The raw contents of the page.
|
||||
#
|
||||
# Returns the String data.
|
||||
def raw_data
|
||||
return nil unless @blob
|
||||
|
||||
if !@wiki.repo.bare && @blob.is_symlink
|
||||
new_path = @blob.symlink_target(::File.join(@wiki.repo.path, '..', self.path))
|
||||
return IO.read(new_path) if new_path
|
||||
end
|
||||
|
||||
@blob.data
|
||||
end
|
||||
|
||||
# Public: A text data encoded in specified encoding.
|
||||
#
|
||||
# encoding - An Encoding or nil
|
||||
#
|
||||
# Returns a character encoding aware String.
|
||||
def text_data(encoding=nil)
|
||||
if raw_data.respond_to?(:encoding)
|
||||
raw_data.force_encoding(encoding || Encoding::UTF_8)
|
||||
else
|
||||
raw_data
|
||||
end
|
||||
end
|
||||
|
||||
# Public: The formatted contents of the page.
|
||||
#
|
||||
# encoding - Encoding Constant or String.
|
||||
#
|
||||
# Returns the String data.
|
||||
def formatted_data(encoding = nil, &block)
|
||||
@blob && markup_class.render(historical?, encoding) do |doc|
|
||||
@doc = doc
|
||||
yield doc if block_given?
|
||||
end
|
||||
end
|
||||
|
||||
# Public: The table of contents of the page.
|
||||
#
|
||||
# formatted_data - page already marked up in html.
|
||||
#
|
||||
# Returns the String data.
|
||||
def toc_data()
|
||||
return @parent_page.toc_data if @parent_page and @sub_page
|
||||
formatted_data if markup_class.toc == nil
|
||||
markup_class.toc
|
||||
end
|
||||
|
||||
# Public: Embedded metadata.
|
||||
#
|
||||
# Returns Hash of metadata.
|
||||
def metadata()
|
||||
formatted_data if markup_class.metadata == nil
|
||||
markup_class.metadata
|
||||
end
|
||||
|
||||
# Public: The format of the page.
|
||||
#
|
||||
# Returns the Symbol format of the page; one of the registered format types
|
||||
def format
|
||||
self.class.format_for(@blob.name)
|
||||
end
|
||||
|
||||
# Gets the Gollum::Markup instance that will render this page's content.
|
||||
#
|
||||
# Returns a Gollum::Markup instance.
|
||||
def markup_class
|
||||
@markup_class ||= @wiki.markup_classes[format].new(self)
|
||||
end
|
||||
|
||||
# Public: The current version of the page.
|
||||
#
|
||||
# Returns the Grit::Commit.
|
||||
attr_reader :version
|
||||
|
||||
# Public: All of the versions that have touched the Page.
|
||||
#
|
||||
# options - The options Hash:
|
||||
# :page - The Integer page number (default: 1).
|
||||
# :per_page - The Integer max count of items to return.
|
||||
# :follow - Follow's a file across renames, but falls back
|
||||
# to a slower Grit native call. (default: false)
|
||||
#
|
||||
# Returns an Array of Grit::Commit.
|
||||
def versions(options = {})
|
||||
if options[:follow]
|
||||
options[:pretty] = 'raw'
|
||||
options.delete :max_count
|
||||
options.delete :skip
|
||||
log = @wiki.repo.git.native "log", options, @wiki.ref, "--", @path
|
||||
Grit::Commit.list_from_string(@wiki.repo, log)
|
||||
else
|
||||
@wiki.repo.log(@wiki.ref, @path, log_pagination_options(options))
|
||||
end
|
||||
end
|
||||
|
||||
# Public: The first 7 characters of the current version.
|
||||
#
|
||||
# Returns the first 7 characters of the current version.
|
||||
def version_short
|
||||
version.to_s[0,7]
|
||||
end
|
||||
|
||||
# Public: The header Page.
|
||||
#
|
||||
# Returns the header Page or nil if none exists.
|
||||
def header
|
||||
@header ||= find_sub_page(:header)
|
||||
end
|
||||
|
||||
# Public: The footer Page.
|
||||
#
|
||||
# Returns the footer Page or nil if none exists.
|
||||
def footer
|
||||
@footer ||= find_sub_page(:footer)
|
||||
end
|
||||
|
||||
# Public: The sidebar Page.
|
||||
#
|
||||
# Returns the sidebar Page or nil if none exists.
|
||||
def sidebar
|
||||
@sidebar ||= find_sub_page(:sidebar)
|
||||
end
|
||||
|
||||
# Gets a Boolean determining whether this page is a historical version.
|
||||
# Historical pages are pulled using exact SHA hashes and format all links
|
||||
# with rel="nofollow"
|
||||
#
|
||||
# Returns true if the page is pulled from a named branch or tag, or false.
|
||||
def historical?
|
||||
!!@historical
|
||||
end
|
||||
|
||||
#########################################################################
|
||||
#
|
||||
# Class Methods
|
||||
#
|
||||
#########################################################################
|
||||
|
||||
# Convert a human page name into a canonical page name.
|
||||
#
|
||||
# name - The String human page name.
|
||||
# char_white_sub - Substitution for whitespace
|
||||
# char_other_sub - Substitution for other special chars
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# Page.cname("Bilbo Baggins")
|
||||
# # => 'Bilbo-Baggins'
|
||||
#
|
||||
# Page.cname("Bilbo Baggins",'_')
|
||||
# # => 'Bilbo_Baggins'
|
||||
#
|
||||
# Returns the String canonical name.
|
||||
def self.cname(name, char_white_sub = '-', char_other_sub = '-')
|
||||
name.respond_to?(:gsub) ?
|
||||
name.gsub(%r{\s},char_white_sub).gsub(%r{[<>+]}, char_other_sub) :
|
||||
''
|
||||
end
|
||||
|
||||
# Convert a format Symbol into an extension String.
#
# format - The format Symbol.
#
# Returns the String extension (no leading period); :markdown maps to
# "md", every other format is simply stringified.
def self.format_to_ext(format)
  return "md" if format == :markdown

  format.to_s
end
|
||||
|
||||
#########################################################################
|
||||
#
|
||||
# Internal Methods
|
||||
#
|
||||
#########################################################################
|
||||
|
||||
# The underlying wiki repo.
#
# Returns the Gollum::Wiki containing the page.
attr_reader :wiki

# Set the Grit::Commit version of the page (used by Page#find when
# loading a page at a specific revision).
#
# Returns nothing.
attr_writer :version
|
||||
|
||||
# Find a page in the given Gollum repo.
#
# name    - The human or canonical String page name to find.
# version - The String version ID to find (or a Grit::Commit).
# dir     - Optional String directory the page must live in.
# exact   - Optional Boolean requiring an exact directory match.
#
# Returns a Gollum::Page or nil if the page could not be found (including
# when the version SHA does not exist in the repository).
def find(name, version, dir = nil, exact = false)
  map = @wiki.tree_map_for(version.to_s)
  page = find_page_in_tree(map, name, dir, exact)
  return nil unless page

  page.version =
    if version.is_a?(Grit::Commit)
      version
    else
      @wiki.commit_for(version)
    end
  # Pages looked up by exact SHA (rather than ref name) are historical.
  page.historical = page.version.to_s == version.to_s
  page
rescue Grit::GitRuby::Repository::NoSuchShaFound
  nil
end
|
||||
|
||||
# Find a page in a given tree.
#
# map         - The Array tree map from Wiki#tree_map.
# name        - The canonical String page name.
# checked_dir - Optional String of the directory a matching page needs
#               to be in (normalized via BlobEntry.normalize_dir and
#               compared case-insensitively).
# exact       - Boolean; when true and no directory is given, only the
#               repository root ('') is searched.
#
# Returns a Gollum::Page or nil if the page could not be found.
def find_page_in_tree(map, name, checked_dir = nil, exact = false)
  return nil if !map || name.to_s.empty?

  wanted_dir = BlobEntry.normalize_dir(checked_dir)
  wanted_dir = wanted_dir.downcase unless wanted_dir.nil?
  wanted_dir = '' if exact && wanted_dir.nil?

  hit = map.find do |entry|
    next false if entry.name.to_s.empty?
    next false unless wanted_dir.nil? || entry.dir.downcase == wanted_dir
    page_match(name, entry.name)
  end

  hit && hit.page(@wiki, @version)
end
|
||||
|
||||
# Populate the Page with information from the Blob.
#
# blob - The Grit::Blob that contains the info.
# path - The String directory path of the page file (may be nil).
#
# Returns the populated Gollum::Page (self).
def populate(blob, path = nil)
  @blob = blob
  # Join dir and filename, then drop the leading '/' separator.
  joined = "#{path}/#{blob.name}"
  @path = joined[1..-1]
  self
end
|
||||
|
||||
# The full directory path for the given tree.
#
# treemap - The Hash treemap containing parentage information
#           (child tree => parent tree).
# tree    - The Grit::Tree for which to compute the path.
#
# Returns the String path ('' for a tree with no parent in the map,
# '/a/b' style otherwise).
def tree_path(treemap, tree)
  segments = []
  node = tree
  # Walk up the parent chain, collecting names from the bottom up.
  while (parent = treemap[node])
    segments.unshift(node.name)
    node = parent
  end
  segments.empty? ? '' : '/' + segments.join('/')
end
|
||||
|
||||
# Compare the canonicalized versions of the two names.
#
# name     - The human or canonical String page name.
# filename - The String filename on disk (including extension).
#
# Returns true when the filename is a valid page filename and any of the
# wiki's whitespace substitutions makes its canonical name match the
# requested name (case-insensitively); false otherwise.
def page_match(name, filename)
  match = self.class.valid_filename?(filename)
  return false unless match

  wanted = Page.cname(name).downcase
  @wiki.ws_subs.any? do |sub|
    wanted == Page.cname(match, sub).downcase
  end
end
|
||||
|
||||
# Loads a sub page. Sub page names (footers, headers, sidebars) are
# prefixed with an underscore to distinguish them from other Pages. If
# there is not one within the current directory, walks up the directory
# tree to try and find one within parent directories, finally checking
# the repository root.
#
# name - String or Symbol base page name (e.g. :header).
#
# Returns the Page or nil if none exists.
def find_sub_page(name)
  return nil unless version
  # A sub page never has sub pages of its own.
  return nil if filename =~ /^_/

  target = "_#{name.to_s.capitalize}"
  # Don't return self as our own sub page.
  return nil if page_match(target, filename)

  ancestry = path.split('/')
  ancestry.pop
  map = @wiki.tree_map_for(@wiki.ref, true)

  until ancestry.empty?
    found = find_page_in_tree(map, target, ancestry.join('/'))
    if found
      found.parent_page = self
      return found
    end
    ancestry.pop
  end

  # Fall back to the repository root.
  found = find_page_in_tree(map, target, '')
  found.parent_page = self if found
  found
end
|
||||
|
||||
# Console-friendly representation showing the page name, its format, and
# the path of the repo backing the wiki.
#
# Returns a String.
def inspect
  repo_path = @wiki.repo.path.inspect
  "#<#{self.class.name}:#{object_id} #{name} (#{format}) @wiki=#{repo_path}>"
end
|
||||
end
|
||||
end
|
||||
@@ -1,62 +0,0 @@
|
||||
# ~*~ encoding: utf-8 ~*~
module Gollum
  # Mixin adding simple page-based pagination helpers for git log
  # commands, available both at the class level and on instances.
  module Pagination
    # Hook run when this module is included. Installs the class-level
    # helpers plus a configurable per_page accessor.
    def self.included(klass)
      klass.extend ClassMethods
      class << klass
        # Default Integer max count of items to return in git commands.
        attr_accessor :per_page
      end
      klass.per_page = 30
    end

    module ClassMethods
      # Turns a page number into an offset number for the git skip
      # option. Page numbers below 1 are treated as page 1.
      #
      # page - Integer page number (anything responding to #to_i).
      #
      # Returns an Integer.
      def page_to_skip(page)
        current = [1, page.to_i].max
        (current - 1) * per_page
      end

      # Fills in git-specific options for the log command using simple
      # pagination options.
      #
      # options - Hash of options:
      #           page     - Optional Integer page number (default: 1)
      #           per_page - Optional Integer max count of items to
      #                      return. Defaults to the #per_page class
      #                      method.
      #
      # Returns Hash with :max_count and (when positive) :skip keys;
      # :page and :per_page are removed from the hash.
      def log_pagination_options(options = {})
        offset = page_to_skip(options.delete(:page))
        requested = options.delete(:per_page).to_i
        options[:max_count] = [requested, per_page].max
        options[:skip] = offset if offset > 0
        options
      end
    end

    # Instance-level shortcut for ClassMethods#page_to_skip.
    #
    # page - Integer page number.
    #
    # Returns an Integer.
    def page_to_skip(page)
      self.class.page_to_skip(page)
    end

    # Instance-level shortcut for ClassMethods#log_pagination_options.
    #
    # options - Hash of options (see ClassMethods#log_pagination_options).
    #
    # Returns Hash with :max_count and :skip keys.
    def log_pagination_options(options = {})
      self.class.log_pagination_options(options)
    end
  end
end
|
||||
|
Before Width: | Height: | Size: 939 B After Width: | Height: | Size: 939 B |
|
Before Width: | Height: | Size: 177 B After Width: | Height: | Size: 177 B |
|
Before Width: | Height: | Size: 271 B After Width: | Height: | Size: 271 B |
|
Before Width: | Height: | Size: 433 B After Width: | Height: | Size: 433 B |
|
Before Width: | Height: | Size: 462 B After Width: | Height: | Size: 462 B |
|
Before Width: | Height: | Size: 9.3 KiB After Width: | Height: | Size: 9.3 KiB |
|
Before Width: | Height: | Size: 376 B After Width: | Height: | Size: 376 B |
|
Before Width: | Height: | Size: 1.0 KiB After Width: | Height: | Size: 1.0 KiB |
|
Before Width: | Height: | Size: 2.9 KiB After Width: | Height: | Size: 2.9 KiB |
|
Before Width: | Height: | Size: 3.0 KiB After Width: | Height: | Size: 3.0 KiB |
|
Before Width: | Height: | Size: 3.0 KiB After Width: | Height: | Size: 3.0 KiB |
|
Before Width: | Height: | Size: 3.1 KiB After Width: | Height: | Size: 3.1 KiB |
|
Before Width: | Height: | Size: 525 B After Width: | Height: | Size: 525 B |
|
Before Width: | Height: | Size: 630 B After Width: | Height: | Size: 630 B |
|
Before Width: | Height: | Size: 919 B After Width: | Height: | Size: 919 B |
|
Before Width: | Height: | Size: 235 B After Width: | Height: | Size: 235 B |
|
Before Width: | Height: | Size: 278 B After Width: | Height: | Size: 278 B |
|
Before Width: | Height: | Size: 759 B After Width: | Height: | Size: 759 B |
|
Before Width: | Height: | Size: 290 B After Width: | Height: | Size: 290 B |