summaryrefslogtreecommitdiffstats
path: root/FAQ
diff options
context:
space:
mode:
authorDr. Stephen Henson <steve@openssl.org>2004-03-02 13:39:23 +0000
committerDr. Stephen Henson <steve@openssl.org>2004-03-02 13:39:23 +0000
commitec7c9ee8b8715f60c5f1f315ce2f8a5022a01473 (patch)
tree65c1db32dcccd5589ec1dc140472b4c7e9e3f603 /FAQ
parentf82bb9cb9c50a9b998143218a88d2a5b53b70be6 (diff)
Indent some of the code examples.
Diffstat (limited to 'FAQ')
-rw-r--r--FAQ22
1 files changed, 11 insertions, 11 deletions
diff --git a/FAQ b/FAQ
index ca5683def7..bd40bcd243 100644
--- a/FAQ
+++ b/FAQ
@@ -646,26 +646,26 @@ built OpenSSL with /MD your application must use /MD and cannot use /MDd.
* How do I read or write a DER encoded buffer using the ASN1 functions?
You have two options. You can either use a memory BIO in conjunction
-with the i2d_XXX_bio() or d2i_XXX_bio() functions or you can use the
-i2d_XXX(), d2i_XXX() functions directly. Since these are often the
+with the i2d_*_bio() or d2i_*_bio() functions or you can use the
+i2d_*(), d2i_*() functions directly. Since these are often the
cause of grief here are some code fragments using PKCS7 as an example:
-unsigned char *buf, *p;
-int len;
+ unsigned char *buf, *p;
+ int len;
-len = i2d_PKCS7(p7, NULL);
-buf = OPENSSL_malloc(len); /* or Malloc, error checking omitted */
-p = buf;
-i2d_PKCS7(p7, &p);
+ len = i2d_PKCS7(p7, NULL);
+ buf = OPENSSL_malloc(len); /* or Malloc, error checking omitted */
+ p = buf;
+ i2d_PKCS7(p7, &p);
At this point buf contains the len bytes of the DER encoding of
p7.
The opposite assumes we already have len bytes in buf:
-unsigned char *p;
-p = buf;
-p7 = d2i_PKCS7(NULL, &p, len);
+ unsigned char *p;
+ p = buf;
+ p7 = d2i_PKCS7(NULL, &p, len);
At this point p7 contains a valid PKCS7 structure or NULL if an error
occurred. If an error occurred ERR_print_errors(bio) should give more
-0.126.3'>release-0.126.3 Mirror of https://github.com/gohugoio/hugomatthias
summaryrefslogtreecommitdiffstats
path: root/hugofs/files/classifier.go
blob: bdac2d68698f6681d0c5aa2f440e51e8dbad03d8 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package files

import (
	"bufio"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"unicode"

	"github.com/spf13/afero"
)

const (
	// FilenamePackageHugoJSON is the NPM package.json "template" file.
	FilenamePackageHugoJSON = "package.hugo.json"
	// FilenamePackageJSON is the standard NPM package file.
	FilenamePackageJSON = "package.json"

	// FilenameHugoStatsJSON names the Hugo stats file; its exact role is
	// not visible here — presumably written build statistics.
	FilenameHugoStatsJSON = "hugo_stats.json"
)

var (
	// contentFileExtensions is the single authoritative list of file
	// extensions (without the leading dot) recognized as content files.
	contentFileExtensions = []string{
		"html", "htm",
		"mdown", "markdown", "md",
		"asciidoc", "adoc", "ad",
		"rest", "rst",
		"org",
		"pandoc", "pdc",
	}

	// contentFileExtensionsSet is contentFileExtensions as a lookup set,
	// populated in init.
	contentFileExtensionsSet map[string]bool

	// htmlFileExtensions lists the extensions treated as HTML.
	htmlFileExtensions = []string{
		"html", "htm",
	}

	// htmlFileExtensionsSet is htmlFileExtensions as a lookup set,
	// populated in init.
	htmlFileExtensionsSet map[string]bool
)

func init() {
	contentFileExtensionsSet = extensionSet(contentFileExtensions)
	htmlFileExtensionsSet = extensionSet(htmlFileExtensions)
}

// extensionSet converts a list of extensions into a membership set.
func extensionSet(exts []string) map[string]bool {
	set := make(map[string]bool, len(exts))
	for _, ext := range exts {
		set[ext] = true
	}
	return set
}

// IsContentFile returns whether filename has one of the recognized
// content file extensions.
func IsContentFile(filename string) bool {
	ext := strings.TrimPrefix(filepath.Ext(filename), ".")
	return contentFileExtensionsSet[ext]
}

// IsIndexContentFile returns whether filename is a content file whose
// base name starts with "index." or "_index.".
func IsIndexContentFile(filename string) bool {
	if IsContentFile(filename) {
		base := filepath.Base(filename)
		return strings.HasPrefix(base, "index.") || strings.HasPrefix(base, "_index.")
	}
	return false
}

// IsHTMLFile returns whether filename has an HTML file extension.
func IsHTMLFile(filename string) bool {
	ext := strings.TrimPrefix(filepath.Ext(filename), ".")
	return htmlFileExtensionsSet[ext]
}

// IsContentExt returns whether ext (without the leading dot) is a
// recognized content file extension.
func IsContentExt(ext string) bool {
	isContent := contentFileExtensionsSet[ext]
	return isContent
}

// ContentClass classifies a file found in a content directory.
type ContentClass string

const (
	ContentClassLeaf    ContentClass = "leaf"
	ContentClassBranch  ContentClass = "branch"
	ContentClassFile    ContentClass = "zfile" // Sort below
	ContentClassContent ContentClass = "zcontent"
)

// IsBundle returns whether c denotes a leaf or branch bundle.
func (c ContentClass) IsBundle() bool {
	switch c {
	case ContentClassLeaf, ContentClassBranch:
		return true
	default:
		return false
	}
}

// ClassifyContentFile inspects filename (and, for HTML files, the file
// contents via open) and returns its ContentClass.
func ClassifyContentFile(filename string, open func() (afero.File, error)) ContentClass {
	if !IsContentFile(filename) {
		return ContentClassFile
	}

	if IsHTMLFile(filename) {
		// We need to look inside the file. If the first non-whitespace
		// character is a "<" (and the content is not an HTML comment), we
		// treat it as a regular file.
		// Earlier we created pages for these files, but that had all sorts
		// of troubles, and isn't what it says in the documentation.
		// See https://github.com/gohugoio/hugo/issues/7030
		if open == nil {
			panic(fmt.Sprintf("no file opener provided for %q", filename))
		}

		f, err := open()
		if err != nil {
			return ContentClassFile
		}
		isHTML := isHTMLContent(f)
		f.Close()
		if isHTML {
			return ContentClassFile
		}
	}

	switch {
	case strings.HasPrefix(filename, "_index."):
		return ContentClassBranch
	case strings.HasPrefix(filename, "index."):
		return ContentClassLeaf
	default:
		return ContentClassContent
	}
}

var htmlComment = []rune{'<', '!', '-', '-'}

func isHTMLContent(r io.Reader) bool {
	br := bufio.NewReader(r)
	i := 0
	for {
		c, _, err := br.ReadRune()
		if err != nil {
			break
		}

		if i > 0 {
			if i >= len(htmlComment) {
				return false
			}

			if c != htmlComment[i] {
				return true
			}

			i++
			continue
		}

		if !unicode.IsSpace(c) {
			if i == 0 && c != '<' {
				return false
			}
			i++
		}
	}
	return true
}

const (
	// The top-level component folders of a Hugo source tree.
	ComponentFolderArchetypes = "archetypes"
	ComponentFolderStatic     = "static"
	ComponentFolderLayouts    = "layouts"
	ComponentFolderContent    = "content"
	ComponentFolderData       = "data"
	ComponentFolderAssets     = "assets"
	ComponentFolderI18n       = "i18n"

	// FolderResources names the resources folder; its role is not visible
	// here — presumably the generated-resources cache.
	FolderResources = "resources"
	FolderJSConfig  = "_jsconfig" // Mounted below /assets with postcss.config.js etc.
)

var (
	// JsConfigFolderMountPrefix is the mount prefix for the JS config
	// folder below the assets component folder.
	JsConfigFolderMountPrefix = filepath.Join(ComponentFolderAssets, FolderJSConfig)

	// ComponentFolders lists every component folder; it is sorted in init.
	ComponentFolders = []string{
		ComponentFolderArchetypes,
		ComponentFolderStatic,
		ComponentFolderLayouts,
		ComponentFolderContent,
		ComponentFolderData,
		ComponentFolderAssets,
		ComponentFolderI18n,
	}

	// componentFoldersSet mirrors ComponentFolders for O(1) membership
	// checks; populated in init.
	componentFoldersSet = make(map[string]bool)
)

func init() {
	for _, folder := range ComponentFolders {
		componentFoldersSet[folder] = true
	}
	// Keep the slice sorted so callers can rely on a stable order.
	sort.Strings(ComponentFolders)
}

// ResolveComponentFolder returns the component folder filename belongs
// to, e.g. "content" for "content/blog/foo.md". It returns "" when
// filename is not inside a component folder.
func ResolveComponentFolder(filename string) string {
	filename = strings.TrimPrefix(filename, string(os.PathSeparator))
	for _, cf := range ComponentFolders {
		if !strings.HasPrefix(filename, cf) {
			continue
		}
		// Guard against sibling folders that merely share a prefix
		// (e.g. "contentfoo/bar.md" must not resolve to "content"):
		// the match has to end exactly at the folder name or at a
		// path-separator boundary.
		if len(filename) == len(cf) || filename[len(cf)] == '/' || filename[len(cf)] == os.PathSeparator {
			return cf
		}
	}

	return ""
}

// IsComponentFolder returns whether name is one of the component folders.
func IsComponentFolder(name string) bool {
	_, found := componentFoldersSet[name]
	return found
}