chore: replace Sculpin with Astro
parent d0ef96dead
commit d1b76dda33
830 changed files with 5819 additions and 10532 deletions
@@ -1,37 +0,0 @@
<?php

declare(strict_types=1);

namespace App\Collection;

use Carbon\Carbon;
use Illuminate\Support\Collection;

/**
 * @template TKey of array-key
 * @template TValue
 */
final class TalkCollection extends Collection
{
    private const DATE_FORMAT = 'Y-m-d';
    private const KEY_EVENTS = 'events';
    private const KEY_EVENT_DATE = 'date';

    /**
     * @return self<TKey, TValue>
     */
    public function getEvents(): self
    {
        return $this->flatMap(fn($talk): array => (array) $talk[self::KEY_EVENTS]);
    }

    /**
     * @return self<TKey, TValue>
     */
    public function onlyPastTalks(): self
    {
        $today = Carbon::today()->format(self::DATE_FORMAT);

        return $this->filter(fn(array $event): bool => $event[self::KEY_EVENT_DATE] < $today);
    }
}
@@ -1,59 +0,0 @@
<?php

declare(strict_types=1);

namespace App\TwigExtension;

use App\Collection\TalkCollection;
use Illuminate\Support\Collection;
use Twig\Extension\AbstractExtension;
use Twig\TwigFunction;

/**
 * @template TKey of array-key
 * @template TValue
 */
final class TalkExtension extends AbstractExtension
{
    public function getFunctions()
    {
        return [
            new TwigFunction('get_last_event_date_for_talk', [$this, 'getLastEventDate']),
            new TwigFunction('get_past_talk_count', [$this, 'getPastTalkCount']),
        ];
    }

    /**
     * @param TValue $talk
     */
    public function getLastEventDate($talk): ?string
    {
        return Collection::make($talk['events'])
            ->pluck('date')
            ->sort()
            ->last();
    }

    /**
     * @param iterable<int, TValue> $talks
     */
    public function getPastTalkCount(iterable $talks = []): int
    {
        return $this->getEventsFromTalks($talks)->count();
    }

    /**
     * @param iterable<int, TValue> $talks
     *
     * @return TalkCollection<int, TValue>
     */
    private function getEventsFromTalks(iterable $talks): TalkCollection
    {
        $talkCollection = new TalkCollection($talks);

        return $talkCollection
            ->getEvents()
            ->onlyPastTalks();
    }
}
62 website/src/components/Card.astro Normal file
@@ -0,0 +1,62 @@
---
export interface Props {
  title: string;
  body: string;
  href: string;
}

const { href, title, body } = Astro.props;
---

<li class="link-card">
  <a href={href}>
    <h2>
      {title}
      <span>→</span>
    </h2>
    <p>
      {body}
    </p>
  </a>
</li>
<style>
  .link-card {
    list-style: none;
    display: flex;
    padding: 0.15rem;
    background-color: white;
    background-image: var(--accent-gradient);
    background-size: 400%;
    border-radius: 0.5rem;
    background-position: 100%;
    transition: background-position 0.6s cubic-bezier(0.22, 1, 0.36, 1);
    box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -2px rgba(0, 0, 0, 0.1);
  }

  .link-card > a {
    width: 100%;
    text-decoration: none;
    line-height: 1.4;
    padding: 1rem 1.3rem;
    border-radius: 0.35rem;
    color: #111;
    background-color: white;
    opacity: 0.8;
  }
  h2 {
    margin: 0;
    font-size: 1.25rem;
    transition: color 0.6s cubic-bezier(0.22, 1, 0.36, 1);
  }
  p {
    margin-top: 0.5rem;
    margin-bottom: 0;
    color: #444;
  }
  .link-card:is(:hover, :focus-within) {
    background-position: 0;
  }
  .link-card:is(:hover, :focus-within) h2 {
    color: rgb(var(--accent));
  }
</style>
20 website/src/components/DailyEmailForm.astro Normal file
File diff suppressed because one or more lines are too long
22 website/src/components/Navbar.astro Normal file
@@ -0,0 +1,22 @@
<div>
  <div class="py-4 px-4 mx-auto max-w-2xl">
    <div class="flex flex-col justify-between items-center md:flex-row">
      <div>
        <a href="/">
          <svg
            aria-hidden="true"
            class="w-16 h-16 fill-current dark:text-blue-400 text-blue-primary md:w-18 md:h-18"
            viewBox="0 0 706 504"
            xmlns="http://www.w3.org/2000/svg"
          >
<path d="M456.5 1.1c-12.3 1.5-31 5.5-44.1 9.4-12.7 3.9-63.6 24.6-64.1 26.2-.2.5 1.4 1.7 3.4 2.7 2.1 1 8.9 5.1 15.1 9.2l11.2 7.5 14.5-6c22.9-9.5 37.3-14 57.5-17.8 7.2-1.3 14.7-1.7 31-1.8 18.6 0 23.1.3 33 2.3 22 4.5 46.1 13.9 64.5 25.2 39.3 24.2 69.9 65.3 86.9 116.5 16.3 49.2 13 100.2-9.4 145.3-21.8 43.7-49.2 68.8-101.5 92.9-13.4 6.2-120.1 51.3-121.3 51.3-.5 0-15.7-35.1-33.7-78l-32.8-78 3.1-12.8c4.4-18 5.6-29.5 4.9-48.5-.6-16.8-2-25.7-6.3-38.7-12-35.8-40.8-69.2-74-85.6l-7.2-3.6-4.2-10c-2.4-5.6-3.9-10.3-3.4-10.7.5-.5 9.7-4.5 20.4-9s19.9-8.6 20.4-9.1c1.5-1.5-18.6-10.1-32.3-13.9l-6.8-1.9-20.6 8.7c-11.4 4.8-20.9 8.9-21.2 9.2-.2.3 2 6.3 5 13.3 3 7 5.5 12.8 5.5 12.9 0 .1-7.5.2-16.7.2-11.8.1-19.4.6-25.6 1.8-27.3 5.5-50.5 17.6-70.4 37-21.8 21.2-36.7 49-43 80.2-2.4 12.3-2.4 44 0 57.2 3.6 19.4 11.9 40.4 22.3 56 6.9 10.4 21.1 25.4 31.2 33 29.9 22.5 70.8 33.2 106.2 27.8 18.9-2.8 39.3-10.6 54.1-20.5 13.5-9.1 29.1-23.8 37.6-35.5 1.2-1.7 2.4-2.8 2.7-2.5.3.3 15.6 36.6 34.1 80.5 18.5 44 33.8 80.1 33.9 80.3.8.8 144.9-60.8 162.1-69.3 45.5-22.4 73.4-47.1 95.7-84.7 28-47.4 37.5-99.7 27.8-153.5-6.8-37.6-25-79-48.6-110.3-33.2-44.1-83-74.2-138.4-83.6-11.4-1.9-46.9-2.7-58.5-1.3zM259.2 141.4c42.4 10.9 77.8 50 84.8 93.8 1.6 9.9.8 34.5-1.4 44.8-5.2 24-15.5 43-32.6 60-20.7 20.6-42.8 31.3-67.7 32.7-26.9 1.5-53.2-6.2-74.3-21.7-29.4-21.7-46-56.2-46-95.7 0-45.4 27.2-89.6 66.1-107.2 8.2-3.7 21.7-7.9 29.4-9.1 10.4-1.6 30.8-.4 41.7 2.4z"/><path d="M201 20.6c-83 11.2-157 71-186.5 150.8-22.3 60.3-18.3 134.9 10.2 192 21.5 43.1 59.6 81.6 102.1 103.4 21.1 10.9 46.3 19 71.2 22.9 16.2 2.5 53.1 2.5 68.5 0 25.9-4.2 45.2-10.5 69-22.2 14.4-7.1 39.7-23.2 41.8-26.7.8-1.2-.2-4.6-4.3-14.5-2.9-7-5.7-13.2-6.1-13.7-.5-.5-4.3 1.7-8.6 5.1C320 447.1 277.6 462 232 462c-59.8 0-115.2-26.3-154.8-73.5-32.2-38.3-48.8-88.7-46.9-142 2-53.7 22.1-99.6 60.7-138.5 28.1-28.3 63-47.2 102.9-55.7 11.9-2.6 14.1-2.8 38.6-2.8 28.3 0 39.4 1.3 59 7 27.9 8.1 58.5 26.1 80.9 47.6l10.9 10.5-14.3 6c-7.8 3.2-14.5 6.4-14.7 7-.2.7 16.5 41.3 37.1 90.4 20.7 49.1 37.6 90.1 37.6 91.1 0 3.4-7.1 24.2-11.6 33.8-2.4 5.1-6.6 13.1-9.4 17.7l-5 8.3 6.6 15.6c5.5 12.9 6.9 15.4 8 14.4 2.8-2.3 19.2-27.8 24.4-37.9l5.3-10.3 8.3 19.8c4.7 10.9 8.7 20.1 8.9 20.3.6.6 60.8-24.6 74.5-31.2 29.7-14.2 52.7-35.8 65.3-61.1 16.9-34 17.6-70.8 2.2-112-17-45.3-45.8-76.7-82.5-90-18.2-6.5-43.1-9.1-63.5-6.5-11.5 1.4-30 5.8-40.3 9.5l-7.3 2.6L402.3 91c-20.3-21.3-37.9-34.3-65.4-48.3-33.4-17.1-63.7-23.8-105.9-23.6-10.7.1-24.2.7-30 1.5zM504.5 122c9.2 2.5 22.1 8.3 29.2 13.1 6.9 4.7 18.7 16.3 24.3 23.9 15.1 20.6 26.3 49 29 74 2.3 20.7-3.1 43-14.5 60.5-8.7 13.3-27.6 29.5-44.5 38-6 3.1-47.3 20.6-47.5 20.2-1.1-1.6-87.5-208-87.3-208.3.2-.2 7.4-3.3 15.8-6.9 22.5-9.5 33.8-13 55-16.9 1.4-.2 9.3-.3 17.5-.1 11.5.2 16.9.8 23 2.5z"/>
          </svg>
          <span class="sr-only">
            Oliver Davies
          </span>
        </a>
      </div>
    </div>
  </div>
</div>
1 website/src/env.d.ts (vendored) Normal file
@@ -0,0 +1 @@
/// <reference types="astro/client" />
71 website/src/layouts/Layout.astro Normal file
@@ -0,0 +1,71 @@
---
import '../../assets/css/tailwind.pcss'

import Navbar from '../components/Navbar.astro'

export interface Props {
  title: string;
}

const { title } = Astro.props;

interface Link {
  title: string,
  href: string,
}

const footerLinks = [
  {
    title: 'About',
    href: '/',
  },
  {
    title: 'Blog',
    href: '/blog',
  },
  {
    title: 'Talks',
    href: '/talks',
  },
  {
    title: 'Daily list',
    href: '/daily',
  },
]
---

<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width" />
    <meta name="generator" content={Astro.generator} />
    <link rel="icon" type="image/x-icon" href="/favicon.ico">
    <link rel="preconnect" href="https://fonts.gstatic.com">
    <link href="https://fonts.googleapis.com/css2?family=Roboto+Condensed:ital,wght@0,300;0,400;0,700;1,300&display=swap" rel="stylesheet">
    <title>{title}</title>
  </head>
  <body>
    <div class="min-h-screen font-sans text-base font-light text-gray-900 md:text-xl dark:text-white dark:bg-gray-900">
      <Navbar />

      <div class="py-10 px-4 mx-auto max-w-2xl md:py-10">
        <h1 class="text-xl font-bold md:text-2xl">{title}</h1>

        <div>
          <slot />
        </div>

        <div class="mt-16 mb-6">
          <footer>
            <nav class="flex flex-wrap justify-center -mb-3">
              {footerLinks && footerLinks.map(link => (
                <a class="mx-3 mb-3 text-sm md:text-lg dark:text-white hover:text-gray-900 link dark:hover:text-blue-400" href={link.href}>{link.title}</a>
              ))}
            </nav>
          </footer>
        </div>
      </div>
    </div>
  </body>
</html>
11 website/src/layouts/PageLayout.astro Normal file
@@ -0,0 +1,11 @@
---
import BaseLayout from './Layout.astro'

const { title } = Astro.props.frontmatter || Astro.props;
---

<BaseLayout title={title}>
  <div class="markdown">
    <slot/>
  </div>
</BaseLayout>
19 website/src/pages/ansible-course.mdx Normal file
@@ -0,0 +1,19 @@
---
layout: ../layouts/PageLayout.astro
title: Ansible email course
---

<script src="https://f.convertkit.com/ckjs/ck.5.js"></script>

Register for my upcoming Ansible email course.

<form class="mx-auto mt-8 max-w-md" action="https://app.convertkit.com/forms/3588392/subscriptions" method="post" data-sv-form="3588392" data-uid="f0c1d2b57f" data-format="inline" data-version="5" data-options="{"settings":{"after_subscribe":{"action":"message","success_message":"Success! Now check your email to confirm your subscription.","redirect_url":""},"analytics":{"google":null,"fathom":null,"facebook":null,"segment":null,"pinterest":null,"sparkloop":null,"googletagmanager":null},"modal":{"trigger":"timer","scroll_percentage":null,"timer":5,"devices":"all","show_once_every":15},"powered_by":{"show":true,"url":"https://convertkit.com/features/forms?utm_campaign=poweredby&utm_content=form&utm_medium=referral&utm_source=dynamic"},"recaptcha":{"enabled":false},"return_visitor":{"action":"show","custom_content":""},"slide_in":{"display_in":"bottom_right","trigger":"timer","scroll_percentage":null,"timer":5,"devices":"all","show_once_every":15},"sticky_bar":{"display_in":"top","trigger":"timer","scroll_percentage":null,"timer":5,"devices":"all","show_once_every":15}},"version":"5"}" min-width="400"><div data-style="clean"><ul class="formkit-alert formkit-alert-error" data-element="errors" data-group="alert"></ul>
<div data-element="fields" data-stacked="false" class="seva-fields formkit-fields"><div class="formkit-field">
<input class="block mt-1 w-full" name="email_address" style="color: rgb(0, 0, 0); border-color: rgb(227, 227, 227); border-radius: 4px; font-weight: 400;" aria-label="Email Address" placeholder="What is your best email address?" required="" type="email"/>
</div>
<div class="mt-4">
<button data-element="submit" class="inline-flex justify-center items-center py-3 px-6 w-full font-medium text-white no-underline rounded-md border duration-200 ease-in-out hover:bg-white focus:bg-white border-blue-primary bg-blue-primary transition-color hover:text-blue-primary focus:text-blue-primary">
<div class="formkit-spinner"><div></div><div></div><div></div></div><span class="">Register for updates →</span>
</button></div>
</div>
</div></form>
8 website/src/pages/archive.xml.js Normal file
@@ -0,0 +1,8 @@
import rss from '@astrojs/rss';

export const get = () => rss({
  title: 'Daily list',
  description: '',
  site: 'https://www.oliverdavies.uk',
  items: import.meta.glob('./daily-emails/**/*.{md,mdx}'),
})
21 website/src/pages/archive/[...slug].astro Normal file
@@ -0,0 +1,21 @@
---
import DailyEmailForm from '../../components/DailyEmailForm.astro'
import PageLayout from '../../layouts/PageLayout.astro'

export async function getStaticPaths() {
  const emails = await Astro.glob('../daily-emails/*.md')

  return emails.map(email => ({
    params: {
      slug: email.frontmatter.permalink.replace('archive/', ''),
    },
    props: {
      email,
    }
  }))
}

const { Content } = Astro.props.email
---

<Content />
28 website/src/pages/archive/index.astro Normal file
@@ -0,0 +1,28 @@
---
import PageLayout from '../../layouts/PageLayout.astro'

const emails = await Astro.glob('../daily-emails/*.md');

const sortedEmails = emails
  .sort((a, b) =>
    new Date(b.frontmatter.pubDate).valueOf() -
    new Date(a.frontmatter.pubDate).valueOf()
  )
---

<PageLayout title="Daily emails archive">
  <ul>
    {sortedEmails.map(email => (
      <li>
        <a href={`/${email.frontmatter.permalink}`}>
          {new Date(email.frontmatter.pubDate).toLocaleDateString('en-GB', {
            day: 'numeric',
            month: 'long',
            year: 'numeric',
          })} -
          {email.frontmatter.title}
        </a>
      </li>
    ))}
  </ul>
</PageLayout>
29 website/src/pages/blog/[slug].astro Normal file
@@ -0,0 +1,29 @@
---
import Layout from '../../layouts/Layout.astro'

export async function getStaticPaths() {
  const posts = await Astro.glob('../../posts/*.md')

  return posts
    .map(post => {
      const parts = post.file.replace('.md', '').split('/')
      const slug = parts[parts.length - 1]

      return {
        params: { slug },
        props: { post },
      }
    })
}

const { Content } = Astro.props.post
const { title } = Astro.props.post.frontmatter
---

<Layout title={title}>
  <div class="space-y-6">
    <div class="markdown">
      <Content />
    </div>
  </div>
</Layout>
50 website/src/pages/blog/index.astro Normal file
@@ -0,0 +1,50 @@
---
import PageLayout from '../../layouts/PageLayout.astro'

const posts = await Astro.glob("../../posts/*.md")

// TODO: show all posts when running locally.
const filteredPosts = posts
  .filter(post => !post.frontmatter.draft)
  .filter(post => post.frontmatter.date)

const sortedPosts = filteredPosts
  .map(post => {
    const parts = post.file.replace('.md', '').split('/')
    const slug = parts[parts.length - 1]

    return { post, slug }
  })
  .sort((a, b) =>
    new Date(b.post.frontmatter.date).valueOf() -
    new Date(a.post.frontmatter.date).valueOf()
  )
---

<PageLayout title="Blog">
  <p>This is where I publish my personal blog posts as well as technical posts and tutorials on topics such as Drupal, PHP, Tailwind CSS, automated testing, and systems administration.</p>

  <div>
    {sortedPosts.map((post) => (
      <article>
        <a href=`/blog/${post.slug}`>
          <h2>{post.post.frontmatter.title}</h2>
        </a>

        {post.post.frontmatter.date && (
          <time class="text-base" datetime={post.post.frontmatter.date}>
            {new Date(post.post.frontmatter.date).toLocaleDateString('en-GB', {
              day: 'numeric',
              month: 'long',
              year: 'numeric',
            })}
          </time>
        )}

        <div class="mt-1">
          <p>{post.post.frontmatter.excerpt}</p>
        </div>
      </article>
    ))}
  </div>
</PageLayout>
31 website/src/pages/call.mdx Normal file
@@ -0,0 +1,31 @@
---
layout: ../layouts/PageLayout.astro
title: Book a 1-on-1 consulting call
link: https://savvycal.com/opdavies/consulting-call
price: 149
---

## How it works

- You book a 60-minute consulting call with me.
- Once payment is received, you’ll receive a link to schedule a meeting in my calendar.
- The meeting will take place over Zoom.

<a class="my-4 py-2 px-5 inline-block rounded-lg border-2 border-blue-primary text-lg bg-blue-primary text-white no-underline transition-colors duration-200 hover:bg-white hover:text-blue-primary" href={frontmatter.link}>Book your call now →</a>

If you don’t find the call valuable, I’ll refund 100% of the cost.

### Typical subjects of interest include

- How to approach a new project or task.
- Help writing your first automated test or starting with test-driven development.
- Introducing static analysis or other code quality tools to your project.
- Automating tasks with Docker and/or Ansible.
- Help to fix a bug or some broken code.
- Reviewing your code and providing advice and suggestions.

<aside class="p-6 my-8 border border-gray-300 dark:bg-gray-800 dark:border-gray-700" markdown="1">
<h2 class="mt-0">Ready to book your call?</h2>

<a class="mt-4 py-2 px-5 inline-block rounded-lg border-2 border-blue-primary text-lg bg-blue-primary text-white no-underline transition-colors duration-200 hover:bg-white hover:text-blue-primary" href={frontmatter.link}>Book now for £{frontmatter.price} →</a>
</aside>
10 website/src/pages/company-information.mdx Normal file
@@ -0,0 +1,10 @@
---
layout: ../layouts/PageLayout.astro
title: Company information
---

Company name: Oliver Davies Ltd (previously Oliver Davies Web Development Ltd)

Registered address: 3 Westfield Close, Caerleon, Newport, NP18 3ED

Company number: 8017706
12 website/src/pages/contact.mdx Normal file
@@ -0,0 +1,12 @@
---
layout: ../layouts/PageLayout.astro
title: Contact Oliver
---

export const email = 'oliver@oliverdavies.uk'

The best way to get in touch with me is via email: <a href={`mailto:${email}`}>{email}</a>. I usually reply within one business day.

I'm also on [LinkedIn][linkedin].

[linkedin]: https://www.linkedin.com/in/opdavies
@@ -0,0 +1,75 @@
---
permalink: archive/2022/08/12/git-worktrees-docker-compose
title: Git Worktrees and Docker Compose
pubDate: 2022-08-12
---

I've recently started trialing Git worktrees again as part of my development workflow.

If you are unfamiliar with Git worktrees, they allow you to have multiple branches of a repository checked out at the same time in different directories.

For example, this is what I see within my local checkout of my website repository:

```
.
├── config
├── HEAD
├── main
│   ├── ansible
│   ├── nginx
│   ├── README.md
│   └── website
├── new-post
│   ├── ansible
│   ├── nginx
│   ├── README.md
│   └── website
├── objects
│   ├── info
│   └── pack
├── packed-refs
├── refs
│   ├── heads
│   └── tags
└── worktrees
    ├── main
    └── new-post
```

The first thing that you'll notice is that, because it's a bare clone, it looks a little different to what you usually see in a Git repository.

Each worktree has its own directory, so my "main" branch is inside the `main` directory.

If I need to work on a different branch, such as `new-post`, then I can create a new worktree, move into that directory and start working. I don't need to commit or stash any in-progress work and switch branches.

## Complications with Docker Compose

I use Docker and Docker Compose for my projects, and this caused some issues for me the last time that I tried using worktrees.

By default, Docker Compose will use the name of the directory that the Compose file is in to name its containers. If the directory name is "oliverdavies-uk", then the containers will be `oliverdavies-uk-web_1`, `oliverdavies-uk-db_1` etc.

This doesn't work so well if the directory is a worktree called "main" or "master", as you'll have containers called `main_web_1` or `master_db_1`.

The way to solve this is to use the `COMPOSE_PROJECT_NAME` environment variable.

If you prefix Docker Compose commands with `COMPOSE_PROJECT_NAME=your-project`, or add it to an `.env` file (Docker Compose will load this automatically), then this will override the prefix in the container names to be `your-project-{service}`.

## Container names per worktree

Whilst you could use the same Compose project name within all of your worktrees, I prefer to include the worktree name as a suffix - something like `my-project-main` or `my-project-staging` - and keep these stored in an `.env` file in each worktree's directory.
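As a minimal sketch of that workflow (the repository URL, project name, and branch name here are placeholders rather than taken from the post), adding a worktree and giving it its own Compose project name could look like this:

```bash
# Clone the repository as a bare repository so that it can hold multiple worktrees.
git clone --bare git@example.com:opdavies/my-project.git my-project
cd my-project

# Add a worktree for the branch to work on.
git worktree add new-post
cd new-post

# Store a per-worktree Compose project name, based on the directory name.
echo "COMPOSE_PROJECT_NAME=my-project-$(basename "$PWD")" > .env

# Containers are now prefixed with "my-project-new-post".
docker-compose up -d
```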
As each worktree now has unique container names, I can have multiple instances of a project running at the same time, and each worktree will have its own separate data - meaning that I can make changes and test something in one worktree without affecting any others.

You can also use the `COMPOSE_PROJECT_NAME` variable inside Docker Compose files.

For example, if you use Traefik and need to override the host URL for a service, the string will be interpolated and the project name will be injected as you'd expect.

```yaml
labels:
  - "traefik.http.routers.${COMPOSE_PROJECT_NAME}.rule=Host(
      `${COMPOSE_PROJECT_NAME}.docker.localhost`,
      `admin.${COMPOSE_PROJECT_NAME}.docker.localhost`
    )"
```

This means that Traefik will continue to use a different URL for each worktree without you needing to make any changes to your Docker Compose file.
@@ -0,0 +1,47 @@
---
permalink: archive/2022/08/13/i-wrote-a-neovim-plugin
pubDate: 2022-08-13
title: I wrote a Neovim plugin
tags:
  - neovim
  - open-source
---

I enjoy writing and working with open-source software, going back to when I started working with PHP and Drupal in 2007.

Since then, I've written and maintained a number of Drupal modules and themes, PHP libraries, npm packages, Ansible roles and Docker images - all of which are available on my GitHub and Drupal.org pages.

Just over a year ago, [I switched to using Neovim full-time](/blog/going-full-vim) for my development and DevOps work, and last week, I wrote my first Neovim plugin, written in Lua.

I've used Lua to configure Neovim, but this is the first time that I've written and open-sourced a standalone Neovim plugin.

It's called [toggle-checkbox.nvim](https://github.com/opdavies/toggle-checkbox.nvim) and is used to toggle checkboxes in Markdown files - something that I use frequently for to-do lists.

For example, this is a simple list containing both checked and unchecked checkboxes:

```markdown
- [x] A completed task
- [ ] An incomplete task
```

To toggle a checkbox, the `x` character needs to be either added or removed, depending on whether we're checking or unchecking it.

This is done by calling the `toggle()` function within the plugin.

In my Neovim configuration, I've added a keymap to do this:

```lua
vim.keymap.set(
  "n",
  "<leader>tt",
  function()
    require("toggle-checkbox").toggle()
  end
)
```

This means that I can use the same keymap by pressing `<leader>tt` to check or uncheck a checkbox. I could use Vim's replace mode to do this, but I really wanted to have one keymap that I could use for both.

As it's my first Neovim plugin, I decided to keep it simple.

The main `toggle-checkbox.lua` file is currently only 41 lines of code, and whilst there is an existing Vim plugin that I could have used, I was excited to write my own plugin for Neovim, to start contributing to the Neovim ecosystem, and add a Neovim plugin to my portfolio of open-source projects.

You can view the plugin at <https://github.com/opdavies/toggle-checkbox.nvim>, as well as my Neovim configuration (which is also written in Lua) as part of [my Dotfiles repository](https://github.com/opdavies/dotfiles/tree/main/roles/neovim/files).
@@ -0,0 +1,36 @@
---
permalink: archive/2022/08/14/why-i-write-tests
pubDate: 2022-08-14
title: "Why I write automated tests"
tags: [testing]
---

In February 2012, I saw a tweet from Tim Millwood asking if anyone wanted to maintain or co-maintain a Drupal module called [Override Node Options](https://www.drupal.org/project/override_node_options).

It had more than 9,200 active installations at that time, with versions for Drupal 5, 6 and 7.

I said yes and became the module’s maintainer.

The module now has versions for Drupal 7, 8 and 9, with (at the latest count, according to Drupal.org) 32,292 active installations - which makes it currently the 197th most installed module.

There are two main things that come to mind with this module, both related to automated testing.

Before I became the maintainer, a feature request had been created, along with a large patch file, to add some new permissions to the module. There were some large merge conflicts that stopped me from just committing the changes, but I was able to fix them manually and, because the tests still passed, ensure that the original functionality still worked. There weren’t tests for the new permissions, but I committed the patch and added the tests later.

Without the tests to ensure that the original functionality still worked, I probably wouldn’t have committed the patch and would have just closed the issue.

More recently, a friend and ex-colleague and I decided to refactor some of the module's code.

We wanted to split the `override_node_options.module` file so that each override was in its own file and its own class. This would make them easier to edit and maintain, and if anyone wanted to add a new one, they’d just need to create a new file for it and add it to the list of overrides.

Without the tests ensuring that the module still worked after the refactor, we probably wouldn’t have done it, as it was used on over 30,000 sites that I didn't want to break.

When I was learning about testing, I was working on projects where I was writing the code during the day and the tests in the evening on my own time.

I remember once when my manual testing had been fine, but when writing the test, I found that I’d used an incorrect permission name in the code that was causing the test to fail. This was a bug that, rather than waiting for a QA Engineer or the client to discover and report it, I was able to fix locally before I'd even committed the code.

I also worked on an event booking and management website, where we had code responsible for calculating the number of available spaces for an event based on orders, determining the correct price based on the customer's status and the time until the event, creating voucher codes for new members and event leaders, and bulk messaging event attendees. All of the custom functionality was covered by automated tests.

The great thing about testing is that it gives you confidence that everything still works how you expect - not only when you wrote the code, but also in the future.

I've talked about this, and how to get started with automated testing in Drupal, in a presentation called [TDD - Test-Driven Drupal]({{site.url}}/talks/tdd-test-driven-drupal). If you want to find out more, the slides and a video recording are embedded there.
@@ -0,0 +1,84 @@
---
permalink: archive/2022/08/15/using-run-file-simplify-project-tasks
pubDate: 2022-08-15
title: Using a "run" file to simplify project tasks
tags: ["php"]
---

Every project has its own set of commands that need to be run regularly.

From starting a local server or the project's containers with Docker or Docker Compose, running tests or clearing a cache, or generating the CSS and JavaScript assets, these commands can get quite complicated, and are time-consuming and error-prone to type over and over again.

One common way to simplify these commands is using a `Makefile`.

A Makefile contains a number of named targets that you can reference, and each has one or more commands that it executes.

For example:

```makefile
# Start the project.
start:
	docker-compose up -d

# Stop the project.
stop:
	docker-compose down

# Run a Drush command.
drush:
	docker-compose exec php-fpm drush $(ARGS)
```

With this Makefile, I can run `make start` to start the project, and `make stop` to stop it.

Makefiles work well, but I don't use the full functionality that they offer, such as dependencies for targets, and passing arguments to a command - like arguments for a Drush, Symfony Console, or Artisan command - doesn't work as I originally expected.

In the example, to pass arguments to the `drush` command, I'd have to type `ARGS="cache:rebuild" make drush` for them to get added and the command to work as expected.

An agency that I worked for created and open-sourced their own Makefile-like tool, written in PHP and built on Symfony Console. I gave a talk on it called [Working with Workspace]({{site.url}}/talks/working-with-workspace) and used it on some of my own personal and client projects.

## What I'm using now

The solution that I'm using now is a `run` file, which is something that I learned from Nick Janetakis' blog and YouTube channel.

It's a simple Bash file where you define your commands (or tasks) as functions, and then execute them by typing `./run test` or `./run composer require something`.

Here's the Makefile example, but as a `run` script:

```bash
#!/usr/bin/env bash

function help() {
  # Display some default help text.
  # See examples on GitHub of how to list the available tasks.
  echo "Usage: ./run <task> [args]"
}

function start {
  # Start the project.
  docker-compose up -d
}

function stop {
  # Stop the project.
  docker-compose down
}

function drush {
  # Run a Drush command with any additional arguments.
  # e.g. "./run drush cache:rebuild"
  docker-compose exec php-fpm drush "${@}"
}

# Execute the command, or run "help".
eval "${@:-help}"
```

As it's Bash, I can just use `$1`, `$2` etc to get specific arguments, or `$@` to get them all, so `./run drush cache:rebuild` works as expected and any additional arguments are included.

You can group tasks by having functions like `test:unit` and `test:commit`, and tasks can run other tasks. I use this for running groups of commands within a CI pipeline, and to extract helper functions for tasks like running `docker-compose exec` within the PHP container that other commands like `drush`, `console` or `composer` could re-use.
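As a rough sketch of that pattern (the helper and service names below are illustrative rather than taken from my actual run file), grouped tasks and a shared helper could look like this:

```bash
# Helper task: run a command inside the PHP container, re-used by other tasks.
function _exec_php {
  docker-compose exec php-fpm "${@}"
}

function composer {
  _exec_php composer "${@}"
}

function test:unit {
  _exec_php phpunit --testsuite unit
}

function test:commit {
  # A grouped task that runs other tasks, e.g. from a CI pipeline or Git hook.
  test:unit
}
```

Running `./run test:commit` then executes the whole group, and the shared `_exec_php` helper keeps the `docker-compose exec` details in one place.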
As well as running ad-hoc commands during development, I also use the run file to create functions that run Git pre-commit or pre-push hooks, deploy code with Ansible, or build, push or pull the project's latest Docker images.

I also use one within my Talks repository to generate PDF files using rst2pdf, present them using pdfpc, and generate thumbnail images.

For examples of `run` files that I use in my open-source code, [you can look in my public GitHub repositories](https://github.com/search?l=Shell&q=user%3Aopdavies+filename%3Arun&type=Code), and for more information, here is [Nick's blog post where I first found the idea](https://nickjanetakis.com/blog/replacing-make-with-a-shell-script-for-running-your-projects-tasks).
@@ -0,0 +1,42 @@
---
permalink: archive/2022/08/16/what-are-git-hooks-why-are-they-useful
pubDate: 2022-08-16
title: "What are Git hooks and why are they useful?"
tags: ["git"]
---

In yesterday's email, I mentioned Git hooks but didn't go into any detail. So, what are they?

Git hooks are Bash scripts that you add to your repository that are executed when certain events happen, such as before a commit is made or before a push to a remote.

By default, the script files need to be within the `.git/hooks` directory, have executable permissions, and be named to exactly match the name of the hook - e.g. `pre-push` - with no file extension.

If a hook script returns an error exit code, then the process is stopped and the action doesn't complete.

This is useful if, for example, you or your team use a specified format for commit messages and you want to prevent the commit if the message doesn't match the requirements.

But the main benefit that I get from Git hooks is from the `pre-push` hook.

I use it to run a subset of the checks that are run within the project's CI pipeline, to limit failures in the CI tool and fix simple errors before I push the code.

Typically, these are the quicker tasks such as ensuring the Docker image builds, running linting and static analysis, validating lock files, and some of the automated tests if they don't take too long to run.

If a build is going to fail because of something simple like a linting error, then I'd rather find that out and fix it locally rather than waiting for a CI tool to fail.

Also, if you're utilising trunk-based development and continuous integration, where team members are pushing changes regularly, then you want to keep the pipeline in a passing, deployable state as much as possible and prevent disruption.

But what have Git hooks got to do with the "run" file?

Firstly, I like to keep the scripts as minimal as possible and move the majority of the code into functions within the `run` file. This means that the scripts are only responsible for running functions like `./run test:commit` and returning the appropriate exit code, but it also means that it's easy to iterate on and test them locally without making fake commits or trying to push them to your actual remote repository (and hoping that they don't get pushed).

Secondly, I like to simplify the setup of Git hooks with their own functions.

For security reasons, the `.git/hooks` directory cannot be committed and pushed to your remote, so hooks need to be enabled per user within their own clone of the repository.

A common workaround is to put the scripts in a directory like `.githooks` and either symlink them to where Git expects them to be, or to use the `core.hooksPath` configuration option and change where Git is going to look.

I like to lower the barrier for any team members by creating `git-hooks:on` and `git-hooks:off` functions which either set or unset the `core.hooksPath`. If someone wants to enable the Git hooks then they only need to run one of those commands rather than having to remember the name of the configuration option or manually creating or removing symlinks.
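A minimal sketch of that setup, assuming the hooks live in a committed `.githooks` directory and delegate to the run file (the task name is illustrative):

```bash
#!/usr/bin/env bash
# .githooks/pre-push - committed to the repository and marked as executable.
# It only delegates to the run file; a non-zero exit code aborts the push.
./run test:commit
```

And the two run file tasks that enable or disable the hooks for a local clone:

```bash
function git-hooks:on {
  # Tell Git to look for hooks in the committed .githooks directory.
  git config core.hooksPath .githooks
}

function git-hooks:off {
  # Revert to the default .git/hooks directory.
  git config --unset core.hooksPath
}
```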
There are other Git hooks that can be used, but just using `pre-commit` and `pre-push` has saved me and the teams that I've worked on both Developer time and build minutes, and provides quicker feedback and fewer disruptions in our build pipelines - and I like how simple it can be by creating custom functions in a `run` file.

Lastly, I've created <https://github.com/opdavies/git-hooks-scratch> as an example with a minimal `run` file and some example hooks.
@@ -0,0 +1,39 @@
---
permalink: archive/2022/08/17/one-more-run-command-git-worktrees
pubDate: 2022-08-17
title: One more "run" command, for Git worktrees
tags: ["drupal", "git"]
---

Here's another `run` file example, this time relating to Git worktrees...

One project that I work on is a multilingual Drupal application that needs to work in both English and Welsh. As I'm cloning a fresh version today, I'm doing it as a bare repository so I can use worktrees.

To work on it locally, just like in production, I need to use a different URL for each language so that Drupal can identify it and load the correct content and configuration.

For fixed environments like production or staging, the URLs are set in configuration files, but for ad-hoc environments such as local worktrees, I thought that the best approach was to override them as needed per worktree using Drush (a Drupal CLI tool).

I could do this manually each time or I could automate it in a `run` command. :)

Here's the function that I came up with:

```bash
function drupal:set-urls-for-worktree {
  # Set the site URLs based on the current Git worktree name.
  local worktree_name="$(basename $PWD)"

  local cy_url="cy-projectname-${worktree_name}.docker.localhost"
  local en_url="projectname-${worktree_name}.docker.localhost"

  # Update the URLs.
  drush config:set language.negotiation url.domains.cy -y $cy_url
  drush config:set language.negotiation url.domains.en -y $en_url

  # Display the domains configuration to ensure that they were set correctly.
  drush config:get language.negotiation url.domains
}
```

It builds the worktree URL for each language based on the directory name, executes the configuration change, and finally displays the updated configuration so I can confirm that it's been set correctly.

This is a good example of why I like using `run` files and how I use them to automate and simplify parts of my workflow.
@@ -0,0 +1,27 @@
---
permalink: archive/2022/08/18/talking-drupal-tailwind-css
pubDate: 2022-08-18
title: "'Talking Drupal' and Tailwind CSS"
tags:
  - css
  - tailwind-css
  - twig
---

In March, I was a guest again on the Talking Drupal podcast. This time I was talking about utility CSS and, in particular, the Tailwind CSS framework.

I've become a big fan of this approach to styling websites and was an early adopter of Tailwind, and have released [a starter-kit theme](https://www.drupal.org/project/tailwindcss) for building custom Drupal themes with Tailwind CSS based on what I was using for my own client projects.

## Rebuilding Talking Drupal with Tailwind

Usually when I give a Tailwind CSS talk at a conference or user group, I rebuild something familiar - maybe a page of their website - as an example and to explain some of the concepts and anything that was particularly interesting during the build. (I have [a blog post]({{site.url}}/blog/uis-ive-rebuilt-tailwind-css) that lists the ones that I've done before).

After this podcast episode, I built a [Tailwind version of the Talking Drupal homepage](https://talking-drupal-tailwindcss.oliverdavies.uk).

But, given that Drupal uses Twig and that we'd talked about best practices around using a templating engine to use loops and extract components to organise code and reduce duplication, I definitely wanted to build this example using Twig templates.

Drupal seemed like too much for a single page example, and Symfony or Sculpin could distract from the main focus of the demo, so I decided to start from scratch with an empty PHP file and add Twig and any other dependencies myself.

[The code repository](https://github.com/opdavies/talking-drupal-tailwindcss) is publicly viewable on my GitHub profile so people can look at the code and see some of the things that I talked about during the episode in practice, and not just the resulting HTML in a browser.

You can [listen to the episode](https://talkingdrupal.com/338), and if you want any more information, the slides and video from my [Taking Flight with Tailwind CSS talk]({{site.url}}/talks/taking-flight-with-tailwind-css) are on my website.
@@ -0,0 +1,25 @@
---
permalink: archive/2022/08/19/pair-programming-or-code-reviews
pubDate: 2022-08-19
title: Pair programming or code reviews?
---

It's been almost a year and a half since I last pushed a feature branch, created a pull request, and waited for it to be reviewed and (hopefully) merged and deployed.

On the majority of teams and projects that I've worked on, this was how things were done.

Tasks would be worked on in separate branches which would need to be reviewed by one or more other Developers before being merged.

I'm an advocate for continuous integration and trunk-based development (both of which I plan on writing about in more depth), in which there is no formal code review step, but instead, I encourage people to pair program as much as possible.

Pair or mob (group) programming, for me, is like a real-time code review where you can discuss and make changes instantly, rather than waiting until the work is complete and someone reviewing it after the fact. If a bug is spotted as you're typing it or something could be named better, you can update it there and then.

But there are other benefits too.

Instead of one person writing some code, and others reviewing it after the fact, multiple people have written it together and the knowledge is shared amongst those people.

As you've worked together, you don't need to ask or wait for someone to set time aside to review your changes, so it's quicker for them to be merged and deployed. It's already been reviewed, so as long as any automated checks pass, the code can be merged.

I've worked in pairs where I've taught someone how to write automated tests and do test-driven development, which I suspect wouldn't have been quite the same if they'd just read the finished code afterwards.

Of course, some Developers and teams will prefer the typical code review process - it's worked well for me and projects that I've worked on in the past - but personally, I like the speed, agility, mentoring and learning, and social benefits that I can get more easily from pair programming.
22 website/src/pages/daily-emails/2022-08-20.md Normal file
@@ -0,0 +1,22 @@
---
pubDate: 2022-08-20
title: "A return to offline meetups and conferences"
permalink: "archive/2022/08/20/return-to-offline-meetups-conferences"
tags: ["community"]
---

Yesterday, I dusted off our Meetup page and posted our next [PHP South Wales meetup](https://www.meetup.com/php-south-wales) event.

We've had online meetups and code practice sessions throughout the pandemic and during lockdowns, but this will be our first offline/in person/IRL meetup since February 2020.

As well as organising our online meetups during COVID, I attended a lot of other online events, [usually giving various talks or workshops]({{site.url}}/blog/speaking-remotely-during-covid-19), and whilst they were good for a while, I eventually started to get burned out by them.

I've been an organiser of various meetups and conferences for a long time, and attending events has been a very large part of my career so far - providing opportunities to learn, to network and socialise with other attendees, and to pass knowledge on through talks, workshops and mentoring.

It's been great to see some offline events returning, from local user groups to conferences such as DevOpsDays, DrupalCon and SymfonyLive.

I've given one talk this year - a lot less than this time last year - but it was in front of an audience instead of a screen, and whilst it seemed strange, I'm sure that it's something that will feel normal again in time.

I'm thinking of attending a conference next month, I've submitted some talk suggestions to other conferences and am waiting to hear back, and I'm considering travelling to some of the other UK user groups as they restart - some of which I joined or spoke at online, but it would be great to meet them in person.

Next week, I'll be glad to have PHP South Wales events running again and to see our community back together in person - and then we'll do it again and start getting ready for next month's event.
29 website/src/pages/daily-emails/2022-08-21.md Normal file
@@ -0,0 +1,29 @@
---
permalink: archive/2022/08/21/2022-08-21
pubDate: 2022-08-21
title: "Why I use Docker and Docker Compose for my projects"
tags:
  - docker
---

For the last few years, I've used Docker and Docker Compose exclusively on all of my projects. When I start a new project or onboard a new client, usually one of the first things that I need to do is get an application running in Docker so that I can work on it.

<!-- Since I started programming, I've used a number of different local environments. Starting with WAMP and XAMPP on Windows, MAMP on macOS, Laravel Valet, the Symfony local server, and various open-source Docker-based solutions. -->

I've inherited projects with no environment configuration or documentation at all, where I need to start from scratch to get them running. Ideally, each project would have its local environment configuration in the same Git repository as the application code.

For my own projects, these days I prefer to use Docker and Docker Compose - creating my own Dockerfiles for each project so that the correct dependencies are present and the required build steps are executed, as well as acting as documentation.

It's lean, as the environment is built specifically for each project, and it's easy to configure using Docker and Docker Compose directly, using native patterns such as override files, environment variables and interpolation, and multi-stage builds.

The configuration can be as simple or complicated as it needs to be for each project rather than using a "one size fits all" approach. If I'm working with Drupal, Fractal, Vue.js, a PHP library, a Go command line tool, or something else entirely, I can use the most appropriate starting point.

As well as local development, it's easy to use Docker and Docker Compose in CI environments with tools like GitHub Actions and Bitbucket Pipelines. They will either be present by default or will be easy to install, and it's simple to run a `docker-compose build` or `docker-compose run` command within a pipeline to check that the project builds correctly and to execute tasks such as automated tests or static analysis.
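As a rough sketch (the service and tool names are placeholders and assume the tools are installed in the image), a pipeline step could be as simple as:

```bash
# Fail the job early if the project's images no longer build.
docker-compose build

# Run one-off containers for checks such as static analysis and automated tests.
docker-compose run --rm php-fpm phpstan analyse
docker-compose run --rm php-fpm phpunit
```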
As well as using it for projects, Docker has been useful for me in other situations where I need to run small tools, such as rst2pdf for generating presentation slides, and ADR Tools for working with architectural decision records.

For some situations, like an open-source contribution day, using an off-the-shelf solution would probably be a better option, and some teams will have their own preferences, but I prefer to use Docker and Docker Compose when I can.

Personally, I like to invest time into learning tools that provide reusable knowledge, such as Docker and Docker Compose. I'd prefer to spend time learning something, even if it may take longer compared to other tools, if it's going to give me a return on that investment in the medium to long term.

For some examples of how I work with Docker and Docker Compose, you can [see my public GitHub repositories](https://github.com/opdavies?tab=repositories&q=docker) and how things are put together there.
25 website/src/pages/daily-emails/2022-08-22.md Normal file
@@ -0,0 +1,25 @@
---
permalink: archive/2022/08/22/2022-08-22
pubDate: 2022-08-22
title: "Being a T-shaped Developer"
---

A blog post appeared on my feed this morning, titled [How to be T-Shaped](https://www.nomensa.com/blog/how-to-be-t-shaped).

"T-shaped Developers" is a term that I've also used before. Being T-shaped means that you have deep knowledge in one particular area and a breadth of knowledge in other areas.

I would say that I'm T-shaped.

My main area of knowledge is PHP and Drupal software development - they're the programming language and content management system that I've used throughout most of my career so far, since I started in 2007.

As I worked on my own personal and client projects, I needed to learn more complementary skills.

I needed to learn how to style websites and build themes, so I started to learn front-end development with CSS and frameworks like Bootstrap, Bulma and Tailwind CSS, and JavaScript frameworks like Angular, Vue.js and Alpine, as well as TypeScript.

I also needed to host these projects somewhere, which introduced me to Linux servers, virtual hosts, (S)FTP and SSL, web servers like Apache, Nginx and Caddy, MySQL and MariaDB databases, and, as projects got more complicated, I started using tools like Vagrant and Puppet, Ansible, and Docker for configuring environments to work in.

I don't use Drupal for every project. I've used static site generators and frameworks like Symfony based on the project's requirements, and have projects that use several different technologies at the same time.

The main benefits are that I can either deliver entire projects or projects with more complicated architectures, or work across different teams - mentoring a team of Front-End Developers in Drupal theming, or working with System Administrators to start hosting PHP applications. Having these additional skills is definitely valuable to employers and clients.

I've said that one of the best and worst things about software development is that there's always something new to learn!
31 website/src/pages/daily-emails/2022-08-23.md Normal file
@@ -0,0 +1,31 @@
---
pubDate: 2022-08-23
title: "Git: GUI or command-line?"
permalink: "archive/2022/08/23/git-gui-command-line"
tags:
  - "git"
---

I’ve been using Git for a long time. My first full-time Developer role in 2010 was working on an in-house team, and that project used Git as its version control system.

I remember typing commands into an Ubuntu terminal and trying to wrap my head around the process of adding and staging files, (sometimes) pulling, and then pushing to a remote. I think the remote was a simple bare repository on a server, so there was no UI like there is in GitHub and similar tools today.

In fact, GitHub only started two years earlier in 2008, and GitLab wasn’t around until 2014.

Looking back, my introduction to Git as a Junior Developer wasn't easy, and I remember starting to get frustrated until it eventually "clicked" and made sense.

I don't remember if there were GUIs at that time (I remember using gitk but I can't think when), but having a tool like GitHub where I could see the code, branches and commits would probably have been helpful with my initial learning.

Whilst working locally, I've tried some of the desktop GUI tools like Sourcetree, GitKraken and Tower, but I always come back to using Git on the command line.

While a Git GUI tool may make it easier to learn Git initially as a Junior Developer, I'd recommend trying to learn the command line too.

In my opinion, understanding what’s happening "under the hood" is important when working with a GUI - just in case you find yourself unexpectedly having to use the command line. I’ve seen an error in a Git GUI that suggests running commands in the terminal to debug or fix the issue. If you aren't familiar with the terminal commands or what they do, then I'd expect this to be intimidating and confusing.

If you're working as part of a team or contributing to an open-source project, then the consistency that the command line provides will make it easier when working with colleagues or getting help from project maintainers. You're also learning Git itself rather than a tool that may add its own terminology or change how Git itself works, which can also cause confusion.

There's a lot of Git functionality and concepts that I wouldn't have explored if I'd been relying on a GUI instead of using the command line, such as adding and removing code in chunks using patch mode, using bisect to find when a bug was introduced, worktrees for local code organisation, and understanding merging vs rebasing, interactive and non-interactive rebases, and merge commits and fast-forward merges.
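For reference, a few illustrative commands for the features mentioned above (the tag, branch, and path names are placeholders):

```bash
# Stage individual hunks interactively ("patch mode").
git add --patch

# Find the commit that introduced a bug with a binary search.
git bisect start
git bisect bad                 # the current commit is broken
git bisect good v1.0.0         # a known-good tag or commit
# ...test each commit that bisect checks out, marking it good or bad...
git bisect reset

# Check out another branch in a separate directory using a worktree.
git worktree add ../my-project-staging staging

# Rewrite local commits with an interactive rebase.
git rebase --interactive origin/main
```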
Of course, if you prefer to use a GUI and it works for you, then that's fine. Personally, I like to dig deep when learning tools, to know them inside-out and understand how to use them well, and I think that the time that I've spent learning Git and optimising my workflow paid for itself a long time ago.

How do you like to use Git? Do you prefer to use the command line or a GUI tool? Reply to this email and let me know.
51
website/src/pages/daily-emails/2022-08-24.md
Normal file
51
website/src/pages/daily-emails/2022-08-24.md
Normal file
|
@ -0,0 +1,51 @@
|
|||
---
|
||||
permalink: archive/2022/08/24/2022-08-24
|
||||
pubDate: 2022-08-24
|
||||
title: "How I've configured Git"
|
||||
tags:
|
||||
- "git"
|
||||
---
|
||||
|
||||
After yesterday's post on why I prefer using Git on the command line rather than using a GUI tool, today I thought that I'd post about how I've configured Git.
|
||||
|
||||
First, I rarely ever run the `git` command - I usually run a `g` function that I've created within my zsh configuration.
|
||||
|
||||
Rather than being a simple alias, it's a shell function that runs `git status -sb` to show the current status of the repository if there are no additional arguments. If there are, such as when running `g add`, then it's executed as a normal Git command. (This is something that I first saw from Thoughtbot, if I remember correctly.)
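As a rough sketch (the exact function in my dotfiles may differ slightly), it looks something like this:

```
# If arguments are passed, run them as a normal Git command;
# otherwise, show a short status of the current repository.
g() {
  if [[ $# -gt 0 ]]; then
    git "$@"
  else
    git status -sb
  fi
}
```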
|
||||
|
||||
## Using .gitconfig
|
||||
|
||||
The main part of my configuration is within Git's `~/.gitconfig` file, where I can configure Git to work how I want.
|
||||
|
||||
For example, I like to avoid merge conflicts, so I always want to use fast-forward merges whilst pulling and also to rebase by default. I can do this by adding `ff = only` and `rebase = true` to the `[pull]` section of my `~/.gitconfig` file.
|
||||
|
||||
I can edit the file manually, or run `git config --global pull.rebase true`, which sets the option and updates the file automatically.
|
||||
|
||||
Some of the tweaks that I've made are to only allow fast-forward merges by adding `merge.ff = only`, automatically squash commits when rebasing by setting `rebase.autosquash = true`, and automatically pruning branches by adding `fetch.prune = true`.
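Put together, that part of my `~/.gitconfig` looks roughly like this (a simplified sketch rather than my exact file):

```
[pull]
    ff = only
    rebase = true

[merge]
    ff = only

[rebase]
    autosquash = true

[fetch]
    prune = true
```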
|
||||
|
||||
### Simple aliases
|
||||
|
||||
Another way that I configure Git is using aliases, which are also within the `~/.gitconfig` file.
|
||||
|
||||
For example, if I ran `git config --global alias.b "branch"`, then running `git b` would just run `git branch` which shortens the command and saves some time and keystrokes.
|
||||
|
||||
I have similar one- or two-letter "short" aliases for pushing and pulling code, and some that also add additional arguments, such as `aa` for `add --all` and `worktrees` for `worktree list`.
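Based on those examples, the `[alias]` section might look something like this:

```
[alias]
    aa = add --all
    b = branch
    worktrees = worktree list
```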
|
||||
|
||||
### More complicated aliases
|
||||
|
||||
Aliases can be more complex if needed by prefixing them with a `!`, which means that they're executed as shell commands.
|
||||
|
||||
This means that I can have `repush = !git pull --rebase && git push` to chain two separate Git commands and combine them into one, and `ureset = !git reset --hard $(git upstream)` which executes the full command, including another alias as part of it.
|
||||
|
||||
I also have `issues = !gh issue list --web` and `pulls = !gh pr list --web` to open the current repository's GitHub issues or pull requests respectively, which can be done as it's not limited to just running `git` commands.
|
||||
|
||||
### Custom functions
|
||||
|
||||
Finally, if an alias is getting too long or complex, then it can be extracted into its own file.
|
||||
|
||||
Any executable file within your `$PATH` that starts with `git-` will automatically become a Git command.
|
||||
|
||||
One example that I have is [git-cm](https://github.com/opdavies/dotfiles/blob/2b20cd1e59ae3b1fa81074077e855cbdfa02f146/bin/bin/git-cm) which, similar to the `g` function, is a bash script that checks for any arguments passed to it and runs a slightly different command. It achieves the same thing as if it were an alias, but it's easier to write and maintain as it's in a separate file.
|
||||
|
||||
These are just some examples. If you want to see my entire configuration, then check out [my dotfiles repository on GitHub](https://github.com/opdavies/dotfiles/tree/2b20cd1e59ae3b1fa81074077e855cbdfa02f146/roles/git/files).
|
||||
|
||||
How have you configured Git for your workflow? Reply to this email and let me know.
|
24
website/src/pages/daily-emails/2022-08-25.md
Normal file
|
@ -0,0 +1,24 @@
|
|||
---
|
||||
pubDate: 2022-08-25
|
||||
title: "Why I work in Neovim"
|
||||
tags: ["vim", "neovim"]
|
||||
permalink: "archive/2022/08/25/why-i-work-in-neovim"
|
||||
---
|
||||
|
||||
Over a year ago, I posted that I was [switching to using Neovim full-time]({{site.url}}/blog/going-full-vim) for my development work.
|
||||
|
||||
I'd used Vim one file at a time on remote servers, and added Vim plugins in other IDEs and editors, so I was already familiar with a lot of the key bindings and motions before I decided to use it full-time.
|
||||
|
||||
Still, it was tough to begin with, but once I'd learned how to configure Neovim, I also learned that being able to customise and extend it as much as you need to is one of its main advantages compared to other IDEs and code editors.
|
||||
|
||||
TJ DeVries - a Neovim core team member - has recently coined the term "PDE" (a personalised development environment) which, for me, describes Neovim perfectly.
|
||||
|
||||
Currently, I have a fuzzy-finder to quickly open files (as well as many other things), an LSP client to add code intelligence, auto-completion, refactoring tools, custom snippets, and very recently, a database client and an HTTP client.
|
||||
|
||||
Just as important to me, I've found a growing community of other Neovim users who stream on Twitch, post YouTube videos, write blog posts, or publish their dotfiles for others to see and reference.
|
||||
|
||||
I've learned Lua. Not just for my own Neovim configuration, but I recently wrote and open-sourced my own simple plugin.
|
||||
|
||||
Like Git, I enjoy and prefer using tools that I can configure and adapt to my workflow.
|
||||
|
||||
Given Neovim's flexibility and configurability, its expanding feature set both in core and community plugins, and the growing community, I think that Neovim is going to be something that I continue to use and adapt for a long time.
|
19
website/src/pages/daily-emails/2022-08-26.md
Normal file
|
@ -0,0 +1,19 @@
|
|||
---
|
||||
pubDate: 2022-08-26
|
||||
title: "Always be learning"
|
||||
permalink: "archive/2022/08/26/always-be-learning"
|
||||
---
|
||||
|
||||
I've been a Developer for 15 years, and one thing that I've focussed on throughout is to always keep learning.
|
||||
|
||||
From starting as a self-taught Developer, initially learning HTML and CSS, to later learning PHP and Drupal as well as other languages, frameworks and tools.
|
||||
|
||||
For the last couple of days, I've been experimenting with Next.js - a React-based web framework. I hadn't used React before and have typically reached for Vue.js or sometimes Alpine.js based on what I needed to do. However, I'm always looking for opportunities to learn and implement new things, and see how I can use them in any of my projects.
|
||||
|
||||
This afternoon, I started a new Next.js and TypeScript project, and refactored a small codebase that used a static site generator to create a small number of landing pages from Markdown files.
|
||||
|
||||
In a short time, I'd set up a Docker environment for it based on some of my Vue.js projects, ported the application across to recreate the pages, and finally, updated the CI pipeline that generated the static pages and uploaded them to an S3 bucket.
|
||||
|
||||
The end result is the same - the same HTML pages are generated and uploaded - but, for me, trying and experimenting with new things keeps my work interesting and my knowledge fresh, which benefits me as well as my colleagues and clients.
|
||||
|
||||
As I said in a previous email, one of the great things about software development is that there's always something new to learn.
|
15
website/src/pages/daily-emails/2022-08-27.md
Normal file
|
@ -0,0 +1,15 @@
|
|||
---
|
||||
pubDate: 2022-08-27
|
||||
title: "Giving back"
|
||||
permalink: "archive/2022/08/27/giving-back"
|
||||
---
|
||||
|
||||
Today, I've been at an event run by a local animal rescue charity. It's one that we attend often as my children like to enter the dog show, but this year, I've also sponsored one of the categories.
|
||||
|
||||
As well as organising the PHP South Wales user group, I'm also now a sponsor - donating books and elePHPant plushies for raffle prizes and paying the group's Meetup.com subscription costs.
|
||||
|
||||
Giving back and supporting open-source maintainers and content creators is a big priority of mine. If I use some open-source software or find that someone's Twitch or YouTube channel is useful, and that person or organisation is on GitHub or Patreon, then I'll sponsor them or subscribe to their channel.
|
||||
|
||||
If I find a useful blog post or video, I'll add a comment or link to it on Twitter, thanking them and letting them know that it helped me.
|
||||
|
||||
Especially if it's something that I've used within my projects, it makes sense to support it and its maintainers, so that they keep working on and improving the software, continue streaming, and keep writing blog posts and recording videos for me to learn from.
|
27
website/src/pages/daily-emails/2022-08-28.md
Normal file
|
@ -0,0 +1,27 @@
|
|||
---
|
||||
pubDate: 2022-08-28
|
||||
title: "How I started programming"
|
||||
permalink: "archive/2022-08-28/how-started-programming"
|
||||
---
|
||||
|
||||
In 2007, I was working in the IT sector in a Desktop Support role but hadn't done any coding professionally.
|
||||
|
||||
In my spare time, I was a black belt in Tae Kwon-Do and enjoyed training at a few different schools. Because of my IT experience, I was asked if I could create a website for one of the schools - somewhere that we could post information and class times for new starters, as well as news articles and competition results.
|
||||
|
||||
This would be my introduction to programming.
|
||||
|
||||
I started learning what I needed to know, beginning with HTML and CSS - experimenting with a template that I found online and was able to tweak to match the school's colours.
|
||||
|
||||
I was able to complete the first version of the website with static HTML pages and CSS but had to manually create a new HTML page for every new news article and edit existing pages manually.
|
||||
|
||||
I wanted to make it more dynamic, and started to learn about PHP and MySQL from video courses and online forums.
|
||||
|
||||
After posting a question about some PHP code that I'd written, someone suggested that I look at content management systems - namely Drupal, which was used for that forum (I have [a screenshot of the reply](https://twitter.com/opdavies/status/1185456825103241216)). This was a new concept to me as until that point, I'd written everything so far myself whilst learning it.
|
||||
|
||||
I remember evaluating Drupal alongside some others - rebuilding the same website a few different times, but stuck with Drupal and relaunched it on Drupal 6 and a custom theme that I'd created from the original templates.
|
||||
|
||||
I signed up for a Drupal.org account, started to do some freelance work for a local web design agency, and built a new website for a local cattery.
|
||||
|
||||
I started blogging, attending meetups, and when an opportunity to switch careers to software development came along, I applied for and got the job.
|
||||
|
||||
That job was also using Drupal and, in another email, I'll write more about why I still like and use Drupal years later.
|
22
website/src/pages/daily-emails/2022-08-29.md
Normal file
|
@ -0,0 +1,22 @@
|
|||
---
|
||||
pubDate: 2022-08-29
|
||||
title: "Why I like Drupal"
|
||||
permalink: "archive/2022/08/29/why-like-drupal"
|
||||
tags: ["drupal"]
|
||||
---
|
||||
|
||||
As I said in yesterday's email, I developed my first website project on Drupal. It allowed me to take a static HTML and CSS website and convert it into something that was much easier and quicker for me to update, and allowed me to create more users with permissions to do those tasks too.
|
||||
|
||||
I worked on various Drupal projects, and my first full-time job was on an in-house team where we maintained and enhanced a Drupal 6 website.
|
||||
|
||||
I've since used Drupal for projects of all shapes and sizes with different levels of complexity. Everything from a simple brochure website to large and complex, multilingual, API-driven projects.
|
||||
|
||||
I've been able to build eCommerce websites with Drupal using Ubercart and Drupal Commerce. I've built traditional stores where customers purchase physical products, a photography competition website with custom judging functionality, a site for purchasing commercial and residential property and land searches, and a fully-fledged events booking and management platform.
|
||||
|
||||
Whatever the size and complexity of the project, Drupal is flexible enough to fit it.
|
||||
|
||||
I've loved some of the ecosystem improvements within the last few years. Moving to object-orientated code by default, integrating code from other projects like Symfony, shipping new features every six months as part of the new release cycle, and embracing tools like Composer, PHPStan and Rector.
|
||||
|
||||
I also love being part of the Drupal community. Collaborating on tasks, speaking on Slack, and attending events like DrupalCon where I've been lucky enough to attend, speak and mentor.
|
||||
|
||||
Although Drupal is my specialty and the tool that I've used the most, I don't use it exclusively. I'll talk more about this in tomorrow's email.
|
24
website/src/pages/daily-emails/2022-08-30.md
Normal file
|
@ -0,0 +1,24 @@
|
|||
---
|
||||
pubDate: 2022-08-30
|
||||
title: "Why I don't only use Drupal"
|
||||
permalink: "archive/2022/08/30/why-dont-only-use-drupal"
|
||||
tags: ["drupal"]
|
||||
---
|
||||
|
||||
Yesterday, [I shared some of the reasons]({{site.url}}/archive/2022/08/29/why-like-drupal) why I like Drupal and why I use it for the majority of my projects. But, as I said, I don't use it exclusively, and for some projects I've used different tools.
|
||||
|
||||
Essentially, I always try to recommend and use the best tool for the job.
|
||||
|
||||
I previously interviewed for a job and was asked to complete a coding test. The role was mostly Drupal-focussed, but as the test asked for a command-line application, I completed it using Symfony and Symfony Console, and was able to discuss why I'd made that decision. In my opinion, it was the best choice based on the requirements.
|
||||
|
||||
This is the same approach that I use when making recommendations for a new project.
|
||||
|
||||
I've delivered projects using other tools like the Symfony framework or a static site generator, as long as it fitted the requirements.
|
||||
|
||||
If there's a SaaS solution that can be used instead, or an off-the-shelf tool that can be integrated instead of writing a custom solution, then that should be evaluated.
|
||||
|
||||
There may be other constraints like budgets or deadlines to consider - maybe something can be delivered faster or cheaper using a particular technology, even if it's not the final solution.
|
||||
|
||||
There are situations though where a tool may be the best choice even though it's not the ideal fit based purely on the technical requirements. Maybe the client is already familiar with publishing content in Drupal, or an in-house development team is used to working with a certain tool or language. In that case, those things should be considered too.
|
||||
|
||||
Also, for me, having a chance to evaluate other technologies and explore what's happening outside of the Drupal ecosystem is a good opportunity. A lot of what I've learned about automated testing, for example, is from the wider PHP and JavaScript communities, as well as tools like [Tailwind CSS]({{site.url}}/talks/taking-flight-with-tailwind-css) and [Illuminate Collections]({{site.url}}//talks/using-illuminate-collections-outside-laravel) that I've been able to bring back into my other Drupal projects.
|
40
website/src/pages/daily-emails/2022-09-01.md
Normal file
|
@ -0,0 +1,40 @@
|
|||
---
|
||||
pubDate: 2022-09-01
|
||||
title: "Conventional commits and CHANGELOGs"
|
||||
tags: []
|
||||
permalink: "archive/2022/09/01/conventional-commits-changelogs"
|
||||
---
|
||||
|
||||
One of the things that I've done since joining my current team is to implement a standard approach for our commit messages.
|
||||
|
||||
We're using the [Conventional Commits specification](https://www.conventionalcommits.org), which gives some additional rules to follow when writing commit messages.
|
||||
|
||||
For example:
|
||||
|
||||
```
|
||||
build(deps): update Drupal to 9.4.5
|
||||
|
||||
Updated Drupal's `drupal/core-*` packages to 9.4.5.
|
||||
|
||||
See https://www.drupal.org/project/drupal/releases/9.4.5.
|
||||
|
||||
Refs: #123
|
||||
```
|
||||
|
||||
We can see that this is a `build` task that relates to our project dependencies - in this example, we're updating Drupal core. We can also see this in the subject line.
|
||||
|
||||
In the commit body, I add as much information as possible about the change and include any relevant links, just in case I need to refer to them again, and list the names of anyone else who worked with me. I also typically include any ticket numbers or links in the commit footer.
|
||||
|
||||
So far, I've mostly used the `build`, `chore`, `ci`, `docs` and `refactor` commit types, which are recommended and used by [the Angular convention](https://github.com/angular/angular/blob/22b96b9/CONTRIBUTING.md#-commit-message-guidelines).
|
||||
|
||||
Following this standard means that it's very easy to look at the Git log and see what types of changes are going to be included within a release and, if you're using scopes, which parts of the application are affected.
|
||||
|
||||
Conventional commits also work nicely with something else that we've introduced - a CHANGELOG file.
|
||||
|
||||
There are tools that can generate and update CHANGELOGs automatically from conventional commits, but so far, we've been following the [Keep a Changelog](https://keepachangelog.com) format.
|
||||
|
||||
It's easy to match the commits to the `Added`, `Changed` or `Fixed` types, and although it needs to be updated manually, it's easy to add to the `Unreleased` section of the file and re-organise everything within the appropriate headings as needed as part of a release.
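As a rough illustration of the format (the version number and date below are just placeholders):

```
## [Unreleased]
### Added
- A new feature that hasn't been released yet.

## [1.1.0] - 2022-08-31
### Changed
- An existing feature that was updated in this release.

### Fixed
- A bug fix.
```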
|
||||
|
||||
What I like about this format is that it's more human-friendly and gives a higher level overview of the changes rather than a reformatted Git log.
|
||||
|
||||
As we do trunk-based development and continuous integration on our projects, there can be numerous commits related to the same change, so I'd rather only see a single line in the CHANGELOG for each change. This also makes it easier to share the CHANGELOG file with others, and we can still view and grep the Git log to see the individual commits if we need to.
|
22
website/src/pages/daily-emails/2022-09-02.md
Normal file
|
@ -0,0 +1,22 @@
|
|||
---
|
||||
title: "Automating all the things with Ansible"
|
||||
pubDate: "2022-09-02"
|
||||
permalink: "archive/2022/09/02/automating-all-the-things-with-ansible"
|
||||
tags: ["ansible"]
|
||||
---
|
||||
|
||||
Ansible is a tool for automating IT tasks. It's one of my preferred tools to use, and one that I've written about and [presented talks on]({{site.url}}/talks/deploying-php-ansible-ansistrano) previously.
|
||||
|
||||
It's typically thought of as a tool for managing configuration on servers. For example, you have a new VPS that you want to use as a web server, so it needs Nginx, MySQL, PHP, etc. to be installed - or whatever your application uses. You define the desired state and run Ansible, which will perform whatever tasks are needed to get to that state.
|
||||
|
||||
Ansible, though, does include modules for interacting with services like Amazon AWS and DigitalOcean to create the servers and resources, not just configure them.
|
||||
|
||||
It also doesn't just work on servers. I use Ansible to configure my local development environment, to ensure that dependencies and tools are installed, and requirements like my SSH keys and configuration are present and correct.
|
||||
|
||||
Lastly, I use Ansible to deploy application code onto servers and automatically run any required steps, ensuring that deployments are simple, robust and repeatable.
|
||||
|
||||
In the next few emails, I'll explain how I've been able to utilise Ansible for each of these situations.
|
||||
|
||||
---
|
||||
|
||||
Want to learn more about how I use Ansible? [Register for my upcoming free email course]({{site.url}}/ansible-course).
|
57
website/src/pages/daily-emails/2022-09-03.md
Normal file
|
@ -0,0 +1,57 @@
|
|||
---
|
||||
pubDate: 2022-09-03
|
||||
title: Creating infrastructure with Ansible
|
||||
permalink: archives/2022/09/03/creating-infrastructure-with-ansible
|
||||
tags: ["ansible"]
|
||||
---
|
||||
|
||||
Let's start at the beginning.
|
||||
|
||||
If we want to automate our infrastructure then we first need to create it. This could be done manually or we can automate it.
|
||||
|
||||
Popular tools for this include Terraform and Pulumi, but Ansible also includes modules to interface with hosting providers such as Amazon Web Services, Microsoft Azure, DigitalOcean, and Linode.
|
||||
|
||||
By using one of these tools, you can programmatically provision a new, blank server that is ready for you to configure.
|
||||
|
||||
For example, to [create a DigitalOcean droplet](https://docs.ansible.com/ansible/latest/collections/community/digitalocean/digital_ocean_module.htm):
|
||||
|
||||
```yaml
|
||||
---
|
||||
- community.digitalocean.digital_ocean_droplet:
|
||||
image: ubuntu-20-04-x64
|
||||
name: mydroplet
|
||||
oauth_token: "..."
|
||||
region: sfo3
|
||||
size: s-1vcpu-1gb
|
||||
ssh_keys: [ .... ]
|
||||
state: present
|
||||
wait_timeout: 500
|
||||
register: my_droplet
|
||||
```
|
||||
|
||||
Running this playbook will create a new Droplet with the specified name, size, and operating system, and within the specified region.
|
||||
|
||||
If you needed to create a separate database server or another server for a new environment, then the file can be updated and re-run.
|
||||
|
||||
[Creating an Amazon EC2 instance](https://docs.ansible.com/ansible/latest/collections/amazon/aws/ec2_instance_module.html#ansible-collections-amazon-aws-ec2-instance-module) looks very similar:
|
||||
|
||||
```yaml
|
||||
---
|
||||
- amazon.aws.ec2_instance:
|
||||
image_id: ami-123456
|
||||
instance_type: c5.large
|
||||
key_name: "prod-ssh-key"
|
||||
name: "public-compute-instance"
|
||||
network:
|
||||
assign_public_ip: true
|
||||
security_group: default
|
||||
vpc_subnet_id: subnet-5ca1ab1e
|
||||
```
|
||||
|
||||
This doesn't apply just to servers - you can also use Ansible to create security groups and S3 buckets, manage SSH keys, firewalls, and load balancers.
|
||||
|
||||
Once we have our infrastructure in place, we can start using Ansible to set and manage its configuration, which we'll do in tomorrow's email.
|
||||
|
||||
---
|
||||
|
||||
Want to learn more about how I use Ansible? [Register for my upcoming free email course]({{site.url}}/ansible-course).
|
23
website/src/pages/daily-emails/2022-09-04.md
Normal file
|
@ -0,0 +1,23 @@
|
|||
---
|
||||
title: "Using Ansible for server configuration"
|
||||
pubDate: "2022-09-04"
|
||||
permalink: "archive/2022/09/04/using-ansible-for-server-configuration"
|
||||
---
|
||||
|
||||
[In yesterday's email]({{site.url}}/archives/2022/09/03/creating-infrastructure-with-ansible), I described how to set up a blank server with Ansible.
|
||||
|
||||
Now that we've done that, it needs to be configured.
|
||||
|
||||
Once the server’s IP address or hostname has been added to a `hosts.ini` file, you can run ad-hoc commands against it - such as `ansible all -i hosts.ini -m ping` to run Ansible's `ping` module on all of the hosts in your inventory and check that you can connect to them.
|
||||
|
||||
Another useful one is the `shell` module, which runs ad-hoc commands on each host. If you need to check the uptime of each of your servers, run `ansible all -i hosts.ini -m shell -a uptime`. You can replace the last argument with any other shell command that you need to run, like `df` or `free`.
|
||||
|
||||
Running commands in this way is great for getting started, for routine maintenance, or an emergency free disk space check, but for more complex tasks like configuration management, using playbooks is the better option. They are YAML files that contain lists of tasks that Ansible will run through and execute in order.
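A minimal playbook might look something like this (assuming a `webservers` group in the inventory and a Debian-based host - both of which are placeholders here):

```yaml
---
- hosts: webservers
  become: true

  tasks:
    # Ensure the web server package is installed.
    - name: Install Nginx
      ansible.builtin.apt:
        name: nginx
        state: present
```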
|
||||
|
||||
If you have a group of related tasks, such as for installing a piece of software, then you can combine them into roles. In fact, Ansible Galaxy has thousands of pre-built collections and roles that you can download, include in your playbooks, configure, and run.
|
||||
|
||||
Very quickly, you can get a full stack installed and configured - ready to serve your application.
|
||||
|
||||
---
|
||||
|
||||
Want to learn more about how I use Ansible? [Register for my upcoming free email course]({{site.url}}/ansible-course).
|
25
website/src/pages/daily-emails/2022-09-05.md
Normal file
|
@ -0,0 +1,25 @@
|
|||
---
|
||||
title: "Using Ansible for local environment configuration"
|
||||
pubDate: "2022-09-05"
|
||||
permalink: "archive/2022/09/05/using-ansible-for-local-configuration"
|
||||
---
|
||||
|
||||
As well as [configuring servers]({{site.url}}/archive/2022/09/04/using-ansible-for-server-configuration), you can use Ansible to configure your own local machine and development environment.
|
||||
|
||||
The change that you need to make is within the `hosts.ini` file:
|
||||
|
||||
```
|
||||
127.0.0.1 ansible_connection=local
|
||||
```
|
||||
|
||||
Instead of the server's IP address or hostname, use the localhost IP address and set `ansible_connection` to `local` to tell Ansible to run locally instead of using an SSH connection.
|
||||
|
||||
Another way to do this is to set `hosts: 127.0.0.1` and `connection: local` in your playbook.
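For example, as a minimal sketch:

```yaml
---
- hosts: 127.0.0.1
  connection: local

  tasks:
    # A trivial task just to confirm the local connection works.
    - name: Confirm that Ansible is running locally
      ansible.builtin.debug:
        msg: Running on the local machine
```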
|
||||
|
||||
Once this is done, you can run tasks, roles, and collections to automate tasks such as installing software, adding your SSH keys, configuring your project directories, and anything else that you need to do.
|
||||
|
||||
For an example of this, you can see [my dotfiles repository on GitHub](https://github.com/opdavies/dotfiles).
|
||||
|
||||
---
|
||||
|
||||
Want to learn more about how I use Ansible? [Register for my upcoming free email course]({{site.url}}/ansible-course).
|
26
website/src/pages/daily-emails/2022-09-06.md
Normal file
|
@ -0,0 +1,26 @@
|
|||
---
|
||||
title: "Deploying applications with Ansible"
|
||||
pubDate: "2022-09-06"
|
||||
permalink: "archive/2022/09/06/deploying-applications-with-ansible"
|
||||
---
|
||||
|
||||
The last few days' emails have been about using Ansible to create and configure infrastructure, but it can also be used to deploy application code.
|
||||
|
||||
The simplest approach is to build an artifact locally - e.g. a directory of static HTML pages from a static site generator - and upload it onto the server, and for this you could use Ansible's `synchronize` module.
|
||||
|
||||
It's a wrapper around the `rsync` command and makes it as simple as specifying `src` and `dest` values for the local and remote paths.
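A task for this might look something like the sketch below - the paths are placeholders, and depending on your Ansible version the module may be referenced as `synchronize` or by its fully-qualified name `ansible.posix.synchronize`:

```yaml
# Upload the locally-built site to the web root on the server.
- name: Upload the built site
  ansible.posix.synchronize:
    src: ./public/
    dest: /var/www/example.com/
```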
|
||||
|
||||
For more complicated deployments, I like to use a tool called Ansistrano - an Ansible port of a deployment tool called Capistrano.
|
||||
|
||||
It creates a new directory for each release and updates a `current` symlink to identify and serve the current release, and can share files and directories between releases.
|
||||
|
||||
As well as being able to configure settings such as the deployment strategy, how many old releases to keep, and even the directory and symlink names, there are a number of hooks that you can listen for and add your own steps as playbooks, so you can install dependencies, generate assets, run migrations, or rebuild a cache as part of each deployment.
|
||||
|
||||
If you're running your applications in Docker, you could use Ansible to pull the latest images and restart your applications.
|
||||
|
||||
For more information and examples, I've given a talk on Ansible at various PHP events, which covers some Ansible basics before moving on to [deploying applications with Ansistrano]({{site.url}}/talks/deploying-php-ansible-ansistrano).
|
||||
|
||||
---
|
||||
|
||||
Want to learn more about how I use Ansible? [Register for my upcoming free email course]({{site.url}}/ansible-course).
|
||||
|
30
website/src/pages/daily-emails/2022-09-07.md
Normal file
|
@ -0,0 +1,30 @@
|
|||
---
|
||||
title: "My Tailwind CSS origin story"
|
||||
pubDate: "2022-09-07"
|
||||
permalink: "archive/2022/09/07/my-tailwind-css-origin-story"
|
||||
tags: ["tailwind-css"]
|
||||
---
|
||||
|
||||
Tomorrow night, I'm attending one of Simon Vrachliotis (simonswiss)'s Pro Tailwind workshops, so I thought that it would be a good time, as Simon has done himself recently on the Navbar podcast, to describe how I started using Tailwind CSS.
|
||||
|
||||
I remember watching a lot of Adam Wathan's live streams on YouTube before Tailwind CSS, and I remember when he started a new project - a SaaS product called KiteTail.
|
||||
|
||||
It was a Laravel and Vue.js project, and although I'm not a Laravel Developer primarily, I got a lot of other information from Adam's streams about automated testing, test-driven development, and Vue.js as I was learning Vue at the time.
|
||||
|
||||
One of the episodes was about styling a card component using some styles that Adam was copying between projects - which would eventually be the starting point for Tailwind CSS.
|
||||
|
||||
In fact, I think I watched some of the episode and stopped as I was happy with the Sass and BEM or SMACSS approach that I was using at the time, and didn't initially see the value of the utility CSS approach that I was seeing for the first time (everyone has a similar reaction initially).
|
||||
|
||||
After a while, I did revisit it, but because Tailwind CSS hadn't been released as its own project yet, I (like Simon) started to experiment with Tachyons - another utility CSS library.
|
||||
|
||||
I rebuilt a particularly tricky component that I'd just finished working on and had caused me some issues, and managed to re-do it in only a few minutes.
|
||||
|
||||
I started to use Tachyons on some personal and client projects as a layer on top of other frameworks like Bootstrap and Bulma, and later moved on to Tailwind CSS once it had been released.
|
||||
|
||||
I was working in this way on a project when I realised that I could use Tailwind for all of the styling instead of just adding small sprinklings of utilities here and there. I refactored everything and removed the other framework that I'd been using - leaving just Tailwind CSS.
|
||||
|
||||
With the exception of some legacy projects, now I use Tailwind CSS exclusively and have used it for a number of projects. I've given lunch and learn sessions to teams that I've worked on, [presented a Tailwind CSS talk]({{site.url}}/talks/taking-flight-tailwind-css) at a number of PHP, Drupal, WordPress, and JavaScript events, and maintain [a starter-kit theme](https://www.drupal.org/project/tailwindcss) for using Tailwind in custom Drupal themes.
|
||||
|
||||
I've also rebuilt a [number of existing sites]({{site.url}}/blog/uis-ive-rebuilt-tailwind-css) as examples and written some [Tailwind CSS related blog posts]({{site.url}}/blog/tags/tailwind-css).
|
||||
|
||||
I'm looking forward to attending Simon's workshop tomorrow and quickly putting that knowledge to use in the next phase of a project that I'm currently working on.
|
34
website/src/pages/daily-emails/2022-09-08.md
Normal file
|
@ -0,0 +1,34 @@
|
|||
---
|
||||
title: "Keeping secrets with Ansible Vault"
|
||||
pubDate: "2022-09-08"
|
||||
permalink: "archive/2022/09/08/keeping-secrets-with-ansible-vault"
|
||||
tags: ["ansible"]
|
||||
---
|
||||
|
||||
In the last few posts, I've talked about using Ansible for configuring servers and local environments, and in both cases you're likely to have some sensitive or secret values. These could be database credentials within your application and on your server, or your SSH private keys within your local environment.
|
||||
|
||||
Rather than committing these to a code repository in plain text, Ansible includes the `ansible-vault` command to encrypt values.
|
||||
|
||||
To see this working, run `ansible-vault encrypt_string my-secret-password`, enter a password, and then you should see something like this:
|
||||
|
||||
```
|
||||
!vault |
|
||||
$ANSIBLE_VAULT;1.1;AES256
|
||||
33353031663366313132333831343930643830346531666564363562666136383838343235646661
|
||||
6336326637333230396133393936646636346230623932650a333035303265383437633032326566
|
||||
38616262653933353033376161633961323666366132633033633933653763373539613434333039
|
||||
6132623630643261300a346438636332613963623231623161626133393464643634663735303664
|
||||
66306433633363643561316362663464646139626533323363663337363361633333
|
||||
```
|
||||
|
||||
This is the encrypted version of that password, and this could be committed and pushed to a code repository.
|
||||
|
||||
You can use it within a playbook, and you'll be prompted to re-enter the password so that Ansible can decrypt and use it.
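For example, the encrypted output can be pasted in as the value of a variable in a playbook or vars file (`database_password` is just an example name):

```yaml
vars:
  # Paste the full output from `ansible-vault encrypt_string` here.
  database_password: !vault |
    $ANSIBLE_VAULT;1.1;AES256
    ...
```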
|
||||
|
||||
Rather than a single string, you could have a file of variables that you want to encrypt. You can do this by running `ansible-vault encrypt vault.yml` and including the file as before. Again, you'll be prompted by Ansible so that it can decrypt and use the values.
|
||||
|
||||
For an example of how I'm using Ansible Vault, see [the Dransible repository](https://github.com/opdavies/dransible/tree/986ba5097d62ff4cd0e637d40181bab2c4417f2e/tools/ansible) on GitHub or my [Deploying PHP applications with Ansible, Ansible Vault and Ansistrano]({{site.url}}/talks/deploying-php-ansible-ansistrano) talk.
|
||||
|
||||
---
|
||||
|
||||
Want to learn more about how I use Ansible? [Register for my upcoming free email course]({{site.url}}/ansible-course).
|
20
website/src/pages/daily-emails/2022-09-09.md
Normal file
|
@ -0,0 +1,20 @@
|
|||
---
|
||||
title: "Refactoring a Tailwind CSS component"
|
||||
pubDate: "2022-09-09"
|
||||
permalink: "archive/2022/09/09/refactoring-tailwind-component"
|
||||
tags: ["tailwind-css"]
|
||||
---
|
||||
|
||||
After last night's Pro Tailwind theming workshop, I decided to revisit and refactor some similar code that I'd worked on before.
|
||||
|
||||
It was a demo for a presentation on utility-first CSS and Tailwind whilst I was at Inviqa.
|
||||
|
||||
I'd taken one of the components from the website that we'd launched and rebuilt it - in particular to show how Tailwind could be used for responsive and themeable components.
|
||||
|
||||
[The original version](https://play.tailwindcss.com/Yfmw8O5UNN) was written in Tailwind 1 and used custom CSS with `@apply` rules to include text or background colours to elements based on the theme being used on that page or component.
|
||||
|
||||
As well as moving it into a Next.js application, [the new version](https://github.com/opdavies/inviqa-tailwindcss-example) uses techniques covered in Simon's workshop - using CSS custom properties (aka variables) to override the colours, and writing custom plugins to generate the required styles. It doesn't include everything from the workshop, but enough for this refactor.
|
||||
|
||||
I also moved the `flex-basis` classes into their own standalone plugin and might release that as its own open-source plugin.
|
||||
|
||||
I'm working on a client project at the moment which will need switchable themes so I'm looking forward to putting these techniques to use again in the near future.
|
38
website/src/pages/daily-emails/2022-09-10.md
Normal file
|
@ -0,0 +1,38 @@
|
|||
---
|
||||
title: "Automating Ansible deployments in CI"
|
||||
pubDate: "2022-09-10"
|
||||
permalink: "archive/2022/09/10/automating-ansible-deployments-ci"
|
||||
tags: ["ansible"]
|
||||
---
|
||||
|
||||
Once you have a deployment that's run using Ansible, rather than running it manually, it's easy to automate it as part of a continuous integration pipeline and have your changes pushed automatically by tools like GitHub Actions and GitLab CI.
|
||||
|
||||
You'll need to configure SSH by adding a known hosts file and a private key so the tool can connect to your server, but after that, it's just running the same Ansible commands.
|
||||
|
||||
If you're using Ansistrano or other roles, you can install dependencies by using `ansible-galaxy`, and `ansible-vault` to decrypt and use any encrypted variables - securely storing the Vault password and any other secrets as environment variables within your pipeline.
|
||||
|
||||
Here's an example using GitHub Actions:
|
||||
|
||||
```
|
||||
- name: Download Ansible roles
|
||||
run: ansible-galaxy install -r requirements.yml
|
||||
|
||||
- name: Export the Ansible Vault password
|
||||
run: echo $ANSIBLE_VAULT_PASS > .vault-pass.txt
|
||||
env:
|
||||
ANSIBLE_VAULT_PASS: ${{ secrets.ANSIBLE_VAULT_PASS }}
|
||||
|
||||
- name: Deploy the code
|
||||
run: >
|
||||
ansible-playbook deploy.yml
|
||||
-i inventories/$INVENTORY_FILE.ini
|
||||
-e "project_git_branch=$GITHUB_SHA"
|
||||
--vault-password-file=.vault-pass.txt
|
||||
|
||||
- name: Remove the Ansible Vault password file
|
||||
run: rm .vault-pass.txt
|
||||
```
|
||||
|
||||
Before these steps, I've added the SSH key and determined which inventory file to use based on the updated branch. The Vault password is exported and then removed once it has been used.
|
||||
|
||||
Automated tests and other code quality checks can be run in a prior job, ensuring that the deployment only happens if those checks pass. Assuming that all is good, the playbook will be run and the changes will be deployed automatically.
|
62
website/src/pages/daily-emails/2022-09-11.md
Normal file
|
@ -0,0 +1,62 @@
|
|||
---
|
||||
title: "Custom styles in Tailwind CSS: `@apply`, `theme` or custom plugins"
|
||||
pubDate: "2022-09-11"
|
||||
permalink: "archive/2022/09/11/custom-styles-tailwind-css-apply-theme-custom-plugins"
|
||||
tags: ["tailwind-css"]
|
||||
---
|
||||
|
||||
There are three ways to add custom styles to a Tailwind CSS project. As there have been [some recent tweets](https://twitter.com/adamwathan/status/1559250403547652097) around one of them - the `@apply` directive - I'd like to look at and give examples for each.
|
||||
|
||||
## What is `@apply`?
|
||||
|
||||
`@apply` is a PostCSS directive, provided by Tailwind, to allow re-using its classes - either when extracting components or overriding third-party styles.
|
||||
|
||||
The CSS file is the same as if you were writing traditional CSS, but rather than adding declarations to a ruleset, you use the `@apply` directive and specify the Tailwind CSS class names that you want to apply.
|
||||
|
||||
For example:
|
||||
|
||||
```css
|
||||
fieldset {
|
||||
@apply bg-primary-dark;
|
||||
}
|
||||
```
|
||||
|
||||
This is a simple example but it's easy to see how this could be used in ways that weren't intended and how edge-cases can be found.
|
||||
|
||||
Adam said in another tweet:
|
||||
|
||||
> I estimate that we spend at least $10,000/month trying to debug extremely edge-case issues people run into by using `@apply` in weird ways.
|
||||
|
||||
## Using the `theme` function
|
||||
|
||||
As well as `@apply`, Tailwind also provides a `theme` function that you can use in your CSS file. This removes the abstraction of using the class names and adds the ability to retrieve values from the `theme` section of your `tailwind.config.js` file.
|
||||
|
||||
```css
|
||||
fieldset {
|
||||
background-color: theme('colors.primary.dark');
|
||||
}
|
||||
```
|
||||
|
||||
This seems to be the preferred approach over using `@apply`.
|
||||
|
||||
## Creating a custom plugin
|
||||
|
||||
The `theme` function is also available if you write a custom Tailwind CSS plugin:
|
||||
|
||||
```javascript
|
||||
const plugin = require('tailwindcss/plugin')
|
||||
|
||||
module.exports = plugin(({ addBase, theme }) => {
|
||||
addBase({
|
||||
fieldset: {
|
||||
backgroundColor: theme('colors.primary.dark'),
|
||||
}
|
||||
})
|
||||
})
|
||||
```
|
||||
|
||||
This is an approach that I've used for [generic, open-source plugins](https://github.com/opdavies?tab=repositories&q=%23tailwindcss-plugin) but for project-specific styling, I've mostly used `@apply` or the `theme` function.
|
||||
|
||||
That said, I like the modular architecture of having different custom plugins - especially if they're separated into their own files - and being able to easily toggle plugins by simply adding to or removing from the `plugins` array.
|
||||
|
||||
I usually don't write many custom styles in a Tailwind project but I think that I'll focus on using the `theme` function going forward, either in a stylesheet or a custom plugin.
|
15
website/src/pages/daily-emails/2022-09-12.md
Normal file
|
@ -0,0 +1,15 @@
|
|||
---
|
||||
title: "A month of daily emails"
|
||||
pubDate: "2022-09-12"
|
||||
permalink: "archive/2022/09/12/month-daily-emails"
|
||||
---
|
||||
|
||||
It’s already been a month since I started my email list and writing daily emails.
|
||||
|
||||
Since then, I’ve written emails on various development and workflow-based topics, including Drupal, Git, Docker, Neovim, Ansible and Tailwind CSS.
|
||||
|
||||
The first email was written on Thursday the 12th of August and after initially wondering whether I should start on the upcoming Monday, or how often to post, I decided to jump in with both feet and wrote the first daily post that day. The first few weren't actually emailed as I waited to see if I could sustain writing a daily post (I was just posting them to my website), but after a few days, I set up the email list and started sending the posts.
|
||||
|
||||
I can confirm what [Jonathan Stark](https://jonathanstark.com) and [Jonathan Hall](https://jhall.io) have said - that it's easier to write daily and that you start to see topic ideas everywhere. I started with a list of between 20 and 25 ideas and still have most of them as I've pivoted on a day's topic based on an article or tweet that I saw, some code that I'd written, or some approach that I took.
|
||||
|
||||
If you're considering starting a daily email list, I'd recommend it.
|
67
website/src/pages/daily-emails/2022-09-14.md
Normal file
|
@ -0,0 +1,67 @@
|
|||
---
|
||||
title: "The simplest Drupal test"
|
||||
pubDate: "2022-09-14"
|
||||
permalink: "archive/2022/09/14/simpletest-drupal-test"
|
||||
---
|
||||
|
||||
Most of my work uses the Drupal framework, and I've given talks and workshops on automated testing and building custom Drupal modules with test-driven development. Today, I wanted to see how quickly I could get a working test suite on a new Drupal project.
|
||||
|
||||
I cloned a fresh version of my [Docker Examples repository](https://github.com/opdavies/docker-examples) and started the Drupal example.
|
||||
|
||||
I ran `mkdir -p web/modules/custom/example/tests/src/Functional` to create the directory structure that I needed, and then `touch web/modules/custom/example/tests/src/Functional/ExampleTest.php` to create a new test file and populated it with some initial code:
|
||||
|
||||
```php
|
||||
<?php
|
||||
|
||||
namespace Drupal\Tests\example\Functional;
|
||||
|
||||
use Drupal\Tests\BrowserTestBase;
|
||||
use Symfony\Component\HttpFoundation\Response;
|
||||
|
||||
class ExampleTest extends BrowserTestBase {
|
||||
|
||||
protected $defaultTheme = 'stark';
|
||||
|
||||
}
|
||||
```
|
||||
|
||||
For the simplest test, I decided to test some existing Drupal core functionality - that an anonymous user can view the front page:
|
||||
|
||||
```php
|
||||
/** @test */
|
||||
public function the_front_page_loads_for_anonymous_users() {
|
||||
$this->drupalGet('<front>');
|
||||
|
||||
$this->assertSession()->statusCodeEquals(Response::HTTP_OK);
|
||||
}
|
||||
```
|
||||
|
||||
To execute the test, I ran `SIMPLETEST_DB=sqlite://localhost//dev/shm/test.sqlite SIMPLETEST_BASE_URL=http://web phpunit -c web/core web/modules/custom`. The environment variables could be added to a `phpunit.xml.dist` file but I decided to add them to the command and use Drupal core's PHPUnit configuration file.
|
||||
|
||||
As this is existing functionality, the test passes. I can change either the path or the response code to ensure that it also fails when expected.
|
||||
|
||||
With the first test working, it's easy to add more for other functionality, such as whether different users should be able to access administration pages:
|
||||
|
||||
```php
|
||||
/** @test */
|
||||
public function the_admin_page_is_not_accessible_to_anonymous_users() {
|
||||
$this->drupalGet('admin');
|
||||
|
||||
$this->assertSession()->statusCodeEquals(Response::HTTP_FORBIDDEN);
|
||||
}
|
||||
|
||||
/** @test */
|
||||
public function the_admin_page_is_accessible_by_admin_users() {
|
||||
$adminUser = $this->createUser([
|
||||
'access administration pages',
|
||||
]);
|
||||
|
||||
$this->drupalLogin($adminUser);
|
||||
|
||||
$this->drupalGet('admin');
|
||||
|
||||
$this->assertSession()->statusCodeEquals(Response::HTTP_OK);
|
||||
}
|
||||
```
|
||||
|
||||
Hopefully, this shows how quickly you can get tests running for a Drupal module. If you'd like to see more, the slides and video recording of my [Test-Driven Drupal talk]({{site.url}}/talks/tdd-test-driven-drupal) are online.
|
111
website/src/pages/daily-emails/2022-09-16.md
Normal file
|
@ -0,0 +1,111 @@
|
|||
---
|
||||
title: "Why I mostly write functional and integration tests"
|
||||
pubDate: "2022-09-16"
|
||||
permalink: "archive/2022/09/16/why-mostly-write-functional-and-integration-tests"
|
||||
tags: ["drupal"]
|
||||
---
|
||||
|
||||
In [Wednesday's email]({{site.url}}/archive/2022/09/14/simpletest-drupal-test), I showed how quick it is to get started writing automated tests for a new Drupal module, starting with a functional test.
|
||||
|
||||
I prefer the outside-in style (or London approach) of test-driven development, where I start with the highest-level test that I can for a task. If the task needs me to make an HTTP request, then I'll use a functional test. If not, I'll use a kernel (or integration) test.
|
||||
|
||||
I find that these higher-level types of tests are easier and quicker to set up compared to starting with lower-level unit tests, cover more functionality, and make it easier to refactor.
|
||||
|
||||
## An example
|
||||
|
||||
For example, this `Device` class, which is a data transfer object around Drupal's `NodeInterface`, ensures that the correct type of node is provided, and includes a named constructor and a helper method to retrieve a device's asset ID from a field:
|
||||
|
||||
```php
|
||||
final class Device {
|
||||
|
||||
private NodeInterface $node;
|
||||
|
||||
public function __construct(NodeInterface $node) {
|
||||
if ($node->bundle() != 'device') {
|
||||
throw new \InvalidArgumentException();
|
||||
}
|
||||
|
||||
$this->node = $node;
|
||||
}
|
||||
|
||||
public function getAssetId(): string {
|
||||
return $this->node->get('field_asset_id')->getString();
|
||||
}
|
||||
|
||||
public static function fromNode(NodeInterface $node): self {
|
||||
return new self($node);
|
||||
}
|
||||
|
||||
}
|
||||
```
|
||||
|
||||
## Testing getting the asset ID using a unit test
|
||||
|
||||
As the `Node::create()` method (what I'd normally use to create a node) interacts with the database, I need to create a mock node to wrap with my DTO.
|
||||
|
||||
I need to specify what value is returned from the `bundle()` method as well as getting the asset ID field value.
|
||||
|
||||
I need to mock the `get()` method and specify the field name that I'm getting the value for, which also returns its own mock of `FieldItemListInterface` with a value set for the `getString()` method.
|
||||
|
||||
```php
|
||||
/** @test */
|
||||
public function should_return_an_asset_id(): void {
|
||||
// Arrange.
|
||||
$fieldItemList = $this->createMock(FieldItemListInterface::class);
|
||||
|
||||
$fieldItemList
|
||||
->method('getString')
|
||||
->willReturn('ABC');
|
||||
|
||||
$deviceNode = $this->createMock(NodeInterface::class);
|
||||
|
||||
$deviceNode
|
||||
->method('bundle')
|
||||
->willReturn('device');
|
||||
|
||||
$deviceNode
|
||||
->method('get')
|
||||
->with('field_asset_id')
|
||||
->willReturn($fieldItemList);
|
||||
|
||||
// Act.
|
||||
$device = Device::fromNode($deviceNode);
|
||||
|
||||
// Assert.
|
||||
self::assertSame('ABC', $device->getAssetId());
|
||||
}
|
||||
```
|
||||
|
||||
This is quite a long 'arrange' section for this test, and it could be confusing for those new to automated testing.
|
||||
|
||||
If I was to refactor from using the `get()` and `getString()` methods to a different implementation, it's likely that the test would fail.
|
||||
|
||||
## Refactoring to a kernel test
|
||||
|
||||
This is how I could write the same test using a kernel (integration) test:
|
||||
|
||||
```php
|
||||
/** @test */
|
||||
public function should_return_an_asset_id(): void {
|
||||
// Arrange.
|
||||
$node = Node::create([
|
||||
'field_asset_id' => 'ABC',
|
||||
'type' => 'device'
|
||||
]);
|
||||
|
||||
// Assert.
|
||||
self::assertSame('ABC', Device::fromNode($node)->getAssetId());
|
||||
}
|
||||
```
|
||||
|
||||
I can create a real `Node` object, pass that to the `Device` DTO, and call the `getAssetId()` method.
|
||||
|
||||
As I can interact with the database, there's no need to create mocks or define return values.
|
||||
|
||||
The 'arrange' step is much smaller, and I think that this is easier to read and understand.
|
||||
|
||||
### Trade-offs
|
||||
|
||||
Even though the test is cleaner, because there are no mocks there's other setup to do - having the required configuration available, enabling modules, and installing schemas and configuration as part of the test, as well as having test-specific modules to store the needed configuration files.
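As a rough sketch of what that setup can look like (the module name is hypothetical, and the exact schemas and configuration to install depend on the entity types and fields involved):

```php
use Drupal\KernelTests\KernelTestBase;

class DeviceTest extends KernelTestBase {

  // The modules that provide the entity types, fields and configuration.
  protected static $modules = ['node', 'user', 'mymodule'];

  protected function setUp(): void {
    parent::setUp();

    // Install the schemas and configuration that the test needs.
    $this->installEntitySchema('node');
    $this->installEntitySchema('user');
    $this->installConfig(['node', 'mymodule']);
  }

}
```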
|
||||
|
||||
Because of this, functional and kernel tests will take more time to run than unit tests, but an outside-in approach could be worth considering, depending on your project and team.
|
21
website/src/pages/daily-emails/2022-09-17.md
Normal file
|
@ -0,0 +1,21 @@
|
|||
---
|
||||
title: "Thoughts on automated code formatting"
|
||||
pubDate: "2022-09-17"
|
||||
permalink: "archive/2022/09/17/thoughts-automated-code-formatting"
|
||||
---
|
||||
|
||||
For a long time, I've been focused on writing code that complies with defined coding standards, either to pass an automated check from a tool like PHP Code Sniffer (PHPCS) or eslint, or a code review from a team member.
|
||||
|
||||
Complying with the standards though is something that I've done manually.
|
||||
|
||||
As well as automated tools for linting the code, there are tools like PHP Code Beautifier and Fixer, and Prettier for formatting the code based on the same standards, which I've started to use more recently.
|
||||
|
||||
These tools can be run on the command line; VS Code has a "Format on save" option, and I can do the same in Neovim using an auto-command that runs after writing a file if an LSP is attached. I typically use a key mapping for this, though, so I can run it when I need to, rather than having it run automatically every time a file is saved.
|
||||
|
||||
One of my concerns with automated code formatting is what to do when working with existing code that doesn't already follow the standards. If I need to make a change to a file, with automated formatting, the rest of the file can change due to formatting being applied when I save my change.
|
||||
|
||||
I recently introduced a PHPCS step to a CI pipeline for an existing project. I knew that it was going to fail initially, but I was able to see the list of errors. I ran the code formatter on each of the files to fix the errors, committed and pushed the changes, and watched the pipeline run successfully.
|
||||
|
||||
This meant that I had a commit reformatting all of the affected files, but it was good to combine these together rather than having them separate, and not mixed with any other changes like a new feature or a bug fix.
|
||||
|
||||
Since doing this, it's been nice when working in this codebase to not have to worry about code style violations, and I can focus on writing the code that I need to, knowing that I can rely on the automated formatting to fix any issues before I commit them.
|
26
website/src/pages/daily-emails/2022-09-19.md
Normal file
|
@ -0,0 +1,26 @@
|
|||
---
|
||||
title: "Useful Git configuration"
|
||||
pubDate: "2022-09-19"
|
||||
permalink: "archive/2022/09/19/useful-git-configuration"
|
||||
tags: ["git"]
|
||||
---
|
||||
|
||||
Here are some snippets from my Git configuration file.
|
||||
|
||||
These days, I use a much simpler workflow and configuration since doing more trunk-based development, but in general, I rebase instead of merging by default, and prefer fast-forward merges, which don't create a merge commit.
|
||||
|
||||
`branch.autosetuprebase = always` and `pull.rebase = true` configure Git to always rebase rather than merge when pulling. It does this for all branches, though I might override this for `main` branches.
|
||||
|
||||
`pull.ff = only` and `merge.ff = only` prevents creating a merge commit and will prevent the merge if it would create one. If I needed to override this, I could by using the `--no-ff` option on the command line.
|
||||
|
||||
I use `checkout.defaultRemote = origin` to ensure that the `origin` remote is used if I have multiple remotes configured, and `push.default = upstream` to set the default remote to push to.
|
||||
|
||||
`merge.autoStash` allows for running merges on a dirty worktree by automatically creating and re-applying a stash of the changes, and `fetch.prune` will automatically prune branches on fetch - keeping things tidy.
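In `~/.gitconfig`, those options look something like this (a simplified sketch rather than my exact file):

```
[branch]
    autosetuprebase = always

[checkout]
    defaultRemote = origin

[pull]
    ff = only
    rebase = true

[push]
    default = upstream

[merge]
    ff = only
    autoStash = true

[fetch]
    prune = true
```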
|
||||
|
||||
I also have and use a number of aliases.
|
||||
|
||||
Some, like `pl = pull` and `ps = push`, are shorter versions of existing commands, and some, like `aa = add --all` and `fixup = commit --fixup`, add additional arguments to commands.
|
||||
|
||||
I also have some, like `current-branch = rev-parse --abbrev-ref HEAD` and `worktrees = worktree list`, which add simple additional commands, and some, like `repush = !git pull --rebase && git push`, which execute shell commands to run more complex commands or combine multiple commands.
|
||||
|
||||
This is a snapshot of my Git configuration. The [full version is on GitHub](https://github.com/opdavies/dotfiles/blob/7e935b12c09358adad480a566988b9cbfaf5999e/roles/git/files/.gitconfig).
|
26
website/src/pages/daily-emails/2022-09-20.md
Normal file
|
@ -0,0 +1,26 @@
|
|||
---
|
||||
title: "Why I like trunk-based development"
|
||||
pubDate: "2022-09-20"
|
||||
permalink: "archive/2022/09/20/why-like-trunk-based-development"
|
||||
tags: ["git"]
|
||||
---
|
||||
|
||||
For the majority of my software development career, I've worked with version control in a very similar way.
|
||||
|
||||
There are one or two long-lived branches, usually a combination of `develop`, `master` or `main`, that contain the production version of the code. When starting work on a new feature or bug fix, a new branch is created where the changes are made in isolation, and is submitted for review once complete. This is typically referred to as "Git Flow" or "GitHub Flow".
|
||||
|
||||
Whilst those changes are awaiting review, a new task is started and the process is repeated.
|
||||
|
||||
## Trunk-based development
|
||||
|
||||
Something that I've been practicing and advocating for lately is trunk-based development, where there's only one branch that everyone works on, and commits and pushes to instead of creating separate per-task branches.
|
||||
|
||||
Even on a client project where I was the only Developer, I was used to creating per-task branches, and I can recall trying to demo two features to a client and the application breaking when switching between branches.
|
||||
|
||||
The vast majority of the time, whether working individually or on a team, I've found that the per-task branches weren't needed and working on a single branch was easier and simpler.
|
||||
|
||||
There are still occasions when a temporary branch is needed, but in general, all changes are made to the single branch.
|
||||
|
||||
Trunk-based development ties in nicely with the continuous integration approach, where everyone commits and pushes their work at least once a day - ideally, multiple times a day. This eliminates long-running feature or bug fix branches that get out of sync with the main branch as well as conflicting with each other.
|
||||
|
||||
It seemed scary to begin with, having been used to per-task branches and asynchronous peer reviews via pull or merge requests, but trunk-based development has made things simpler and encourages other best practices, such as pair and mob programming, having a good CI pipeline to identify regressions, using feature flags to separate code deployments from feature releases, and frequent code integration and deployment via continuous commits and pushes.
|
34
website/src/pages/daily-emails/2022-09-21.md
Normal file
34
website/src/pages/daily-emails/2022-09-21.md
Normal file
|
@ -0,0 +1,34 @@
|
|||
---
|
||||
title: "Being a Drupal contribution mentor"
|
||||
pubDate: "2022-09-21"
|
||||
permalink: "archive/2022/09/21/being-drupal-contribution-mentor"
|
||||
tags: ["drupal"]
|
||||
---
|
||||
|
||||
This week is DrupalCon Prague, and although I'm not at this event, I'd like to write about some of my experiences at DrupalCon - in particular about being a contribution mentor.
|
||||
|
||||
## My first DrupalCon
|
||||
|
||||
The first DrupalCon that I attended was in 2013, also in Prague.
|
||||
|
||||
I was enjoying the session days when I stopped at the mentoring table to find out more about the contribution sprints that were happening on the Friday.
|
||||
|
||||
I didn't have any commits in Drupal core but had already worked on and released some of my own contributed modules, so I was familiar with the tools and the Drupal.org contribution workflow. In short, I was signed up to be a mentor during the sprints.
|
||||
|
||||
I remember being involved in the preparation too, sitting in a hotel lobby, identifying potential issues for new contributors to work on, alongside people who I'd previously interacted with in the issue queues on Drupal.org.
|
||||
|
||||
On the day, I helped new contributors get their local environments up and running, select issues to work on, and perform tasks like creating and re-rolling patch files and submitting them for review.
|
||||
|
||||
One of my highlights at the end of the day was the live commit, when a patch that a new contributor had worked on that day was committed to Drupal core live on stage!
|
||||
|
||||
Whenever I've attended DrupalCon events since, I've always volunteered to be a contribution mentor, as well as mentoring and organising sprints at other Drupal events.
|
||||
|
||||
## The Five Year Issue
|
||||
|
||||
One of the most memorable times mentoring was whilst working with a group of contributors at DrupalCon in May 2015.
|
||||
|
||||
Someone was working on a Drupal core issue that was very similar to [one that I'd looked at](https://www.drupal.org/project/drupal/issues/753898) a few years before.
|
||||
|
||||
We focused on the original issue that I'd commented on, reviewed, tested, and re-rolled the patch, fixed a failing test, and marked it as "reviewed and tested by the community".
|
||||
|
||||
A few days after the conference, and just over five years after my original comment, the patch was committed - giving my contributors their first commits to Drupal 8 core, and also [one of mine](https://git.drupalcode.org/project/drupal/-/commits/9.5.x?search=opdavies).
|
20
website/src/pages/daily-emails/2022-09-22.md
Normal file
20
website/src/pages/daily-emails/2022-09-22.md
Normal file
|
@ -0,0 +1,20 @@
|
|||
---
|
||||
title: "Releasing a Drupal module template"
|
||||
pubDate: "2022-09-22"
|
||||
permalink: "archive/2022/09/22/releasing-drupal-module-template"
|
||||
tags: ["drupal"]
|
||||
---
|
||||
|
||||
Today, I had the idea to create a reusable template for new Drupal modules, based on how I like to build modules and how I've shown others to do so in my Drupal testing workshop.
|
||||
|
||||
So I did, and released it for free [on my GitHub account](https://github.com/opdavies/drupal-module-template).
|
||||
|
||||
Like my Tailwind CSS starter theme on Drupal.org, it's not intended to be added as a module directly, but something that can be cloned and used as a base for people's own modules.
|
||||
|
||||
It includes an example route and Controller that load a basic page, and has a test to ensure that the page exists and loads correctly.
|
||||
|
||||
The Controller is defined as a service and uses autowiring to automatically inject its dependencies, the same as in my workshop example code.
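
As a rough sketch of what that looks like - module, class, and route names here are hypothetical, not necessarily those used in the template - the controller might be:

```php
<?php

namespace Drupal\example\Controller;

use Drupal\Core\StringTranslation\TranslationInterface;

// The matching entry in example.services.yml sets `autowire: true`, so the
// constructor arguments are resolved and injected automatically.
final class ExamplePageController {

  public function __construct(
    private TranslationInterface $translation
  ) {
  }

  public function __invoke(): array {
    return [
      '#markup' => $this->translation->translate('Hello, world!'),
    ];
  }

}
```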
|
||||
|
||||
It's the initial release so it's rough around the edges still. I'll use it tomorrow to create a new module and document the steps to add to the README as well as other pieces of documentation.
|
||||
|
||||
If you're creating a new Drupal module and try it out, start a discussion on the GitHub repository or [let me know on Twitter](https://twitter.com/opdavies). If you have questions, create a discussion or just reply to this email and I'll get back to you.
|
44
website/src/pages/daily-emails/2022-09-23.md
Normal file
44
website/src/pages/daily-emails/2022-09-23.md
Normal file
|
@ -0,0 +1,44 @@
|
|||
---
|
||||
title: "ADRs and Technical Design Documents"
|
||||
pubDate: "2022-09-23"
|
||||
permalink: "archive/2022/09/23/adrs-technical-design-documents"
|
||||
tags: []
|
||||
---
|
||||
|
||||
## Architectural Decision Records
|
||||
|
||||
Architectural Decision Records (ADRs) are documents to record software design choices. They could be saved in your code repository as plain-text or Markdown files, or stored in Confluence or a wiki - wherever your team stores its documentation.
|
||||
|
||||
They usually consist of these sections:
|
||||
|
||||
* Status - is it proposed, accepted, rejected, deprecated, superseded, etc.?
|
||||
* Context - what is the issue that is causing the decision or change?
|
||||
* Decision - what is the change that's being done or proposed?
|
||||
* Consequences - what becomes easier or more difficult to do?
|
||||
|
||||
Any change that is architecturally significant should require an ADR to be written, after which it can be reviewed and potentially actioned.
|
||||
|
||||
These will remain in place to form a decision log, with specific ADRs being marked as superseded if a newer ADR replaces them.
|
||||
|
||||
## Technical Design Documents
|
||||
|
||||
A similar type of document is the Technical Design Document (TDD), which I first saw on TheAltF4Stream. I like to think of these as lightweight ADRs.
|
||||
|
||||
The first heading is always "What problem are we trying to solve?", or sometimes just "The problem".
|
||||
|
||||
Similar to the Context heading in an ADR, this should include a short paragraph describing the issue.
|
||||
|
||||
Unlike ADRs, there are no other set headings but these are some suggested ones:
|
||||
|
||||
- What is the current process?
|
||||
- What are any requirements?
|
||||
- How do we solve this problem?
|
||||
- Alternative approaches
|
||||
|
||||
I like being able, after describing the problem, to move straight into describing what's appropriate and relevant for the task, and to ignore sections that aren't needed.
|
||||
|
||||
When I started writing ADRs, they all had the 'Accepted' status as I was either writing them for myself or in a pair or mob. As it wasn't adding any value, I've removed it since switching to writing TDDs.
|
||||
|
||||
Whether you use ADRs, TDDs or another approach, it's very useful to have a log of all of your architectural design decisions - both to look back on in the future and remember why something was done in a certain way, and, before you start implementing a solution, to review the problem, evaluate the requirements and potential solutions, and document which one was selected and why.
|
||||
|
||||
[Find out more about ADRs](https://adr.github.io) or [find out more about TDDs](https://altf4.wiki/t/how-do-i-write-a-tdd/21).
|
28
website/src/pages/daily-emails/2022-09-25.md
Normal file
28
website/src/pages/daily-emails/2022-09-25.md
Normal file
|
@ -0,0 +1,28 @@
|
|||
---
|
||||
title: "Using a component library for front-end development"
|
||||
pubDate: "2022-09-25"
|
||||
permalink: "archive/2022/09/25/using-component-library-for-front-end-development"
|
||||
tags: []
|
||||
---
|
||||
|
||||
On a current project, I've decided to use a component library as the first place to do front-end development.
|
||||
|
||||
I'm using [Fractal](https://fractal.build) as I can use Twig for templates. As Drupal also uses Twig templates, I have more reusability between the components in Fractal and Drupal compared to converting them from a different templating language like Handlebars or Nunjucks.
|
||||
|
||||
Rather than developing directly within the custom Drupal theme, I've been creating new components and pages initially within Fractal.
|
||||
|
||||
I have been able to create new components quickly and easily, with the views using Twig templates and data injected via a context file - a YAML file for each component whose data is automatically passed into the view.
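
For example, a hypothetical card component could have a Twig view and a context file like these (the names are illustrative, not from the project):

```twig
{# components/card/card.twig #}
<article class="card">
  <h2>{{ title }}</h2>
  <p>{{ summary }}</p>
</article>
```

```yaml
# components/card/card.config.yml
context:
  title: An example card
  summary: Static data that Fractal injects into the Twig view.
```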
|
||||
|
||||
This meant that I've been able to develop new components from scratch without needing to set up content types or paragraphs within Drupal, validate and confirm my data model, and present the templates to the client for review in Fractal. If a change is needed, it's quick to do.
|
||||
|
||||
I've also moved my asset generation step into Fractal. No CSS or JavaScript is compiled within the Drupal theme; it is created within Fractal and copied over with the Twig templates.
|
||||
|
||||
In most cases, I've been able to copy the Twig templates into Drupal and replace the static context data with dynamic data from Drupal without needing to make any further changes.
|
||||
|
||||
In a couple of situations, I've needed to change my implementation slightly when moving a template into Drupal, so in this workflow, I've made the changes in Fractal and re-exported them to keep things in sync between the two systems.
|
||||
|
||||
In situations where there is existing markup and/or styles from the Drupal side, I've copied those into Fractal so that they match before adding the additional styling and any markup changes.
|
||||
|
||||
In general, I like the approach as it gives me more flexibility upfront to make changes before needing to configure Drupal. I can see how things could get out of sync between the two systems, but hopefully having the assets compiled in Fractal and needing to copy them into Drupal will keep things synced up.
|
||||
|
||||
I don't think that I'd use this approach for all projects, but for this one, where I'm working with multiple themes and will need to later add different variants of pages and components, it's worked well so far.
|
20
website/src/pages/daily-emails/2022-09-26.md
Normal file
20
website/src/pages/daily-emails/2022-09-26.md
Normal file
|
@ -0,0 +1,20 @@
|
|||
---
|
||||
title: "Experimenting with the Nix package manager"
|
||||
pubDate: "2022-09-26"
|
||||
permalink: "archive/2022/09/26/experimenting-with-the-nix-package-manager"
|
||||
tags: ["nix"]
|
||||
---
|
||||
|
||||
After seeing it on some recent live streams and YouTube videos, I've recently been trying out the Nix package manager and looking into how I might use it for my local environment setup - potentially replacing some of my current Ansible configuration.
|
||||
|
||||
Separate from the NixOS operating system, Nix is a cross-platform package manager, so instead of using `apt` on Ubuntu and `brew` on macOS, you could run Nix on both and install from the 80,000 packages listed on https://search.nixos.org/packages.
|
||||
|
||||
There is a community project called Home Manager that can be installed alongside Nix and which, similar to Stow or what I'm doing with Ansible, can manage your dotfiles - or even generate them from your Home Manager configuration - and can manage plugins for other tools such as ZSH and tmux.
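
For example, a minimal Home Manager module for Git could look something like this - a simplified sketch rather than my actual configuration, which is linked below:

```nix
{ ... }:

{
  programs.git = {
    enable = true;
    userName = "Oliver Davies";
    userEmail = "oliver@oliverdavies.uk";

    # Generates the matching entries in the .gitconfig file.
    extraConfig = {
      pull.rebase = true;
      fetch.prune = true;
    };
  };
}
```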
|
||||
|
||||
There's also a Nix feature called "Flakes" which allows you to separate configuration for different operating systems. I currently have a flake for Pop!\_OS which installs all of my packages and a minimal flake for my WSL2 environment, as some of the packages are installed in Windows instead of Linux.
|
||||
|
||||
I can see Ansible still being used for my post-setup tasks, such as cloning my initial projects, but I think the majority of my current Ansible setup, where I'm installing and configuring packages, could be moved to Nix.
|
||||
|
||||
I have a work-in-progress Nix-based version [in my dotfiles repository](https://github.com/opdavies/dotfiles/tree/7c3436c553f8b81f99031e6bcddf385d47b7e785) where you can also see [how I've configured Git with Home Manager](https://github.com/opdavies/dotfiles/blob/7c3436c553f8b81f99031e6bcddf385d47b7e785/home-manager/modules/git.nix).
|
||||
|
||||
I may install NixOS on an old laptop to test that out too.
|
16
website/src/pages/daily-emails/2022-09-27.md
Normal file
16
website/src/pages/daily-emails/2022-09-27.md
Normal file
|
@ -0,0 +1,16 @@
|
|||
---
|
||||
title: "Mentoring with Drupal Career Online"
|
||||
pubDate: "2022-09-27"
|
||||
permalink: "archive/2022/09/27/mentoring-with-drupal-career-online"
|
||||
tags: ["drupal"]
|
||||
---
|
||||
|
||||
Today, I met my new mentee from the Drupal Career Online program.
|
||||
|
||||
[As well as mentoring at events like DrupalCamps and DrupalCons](/archive/2022/09/21/being-drupal-contribution-mentor), I enjoy mentoring and working with new Developers going through bootcamps and training programmes like Drupal Career Online - some who are experienced Developers learning a new skill, and some who are learning how to code and taking their first steps into programming.
|
||||
|
||||
I've talked about [how I got started programming](/archive/2022-08-28/how-started-programming), but as a self-taught Developer, it would have been great to have had a mentor to ask questions of, to help get me started, and to make sure that I was going down the right track and learning the correct things.
|
||||
|
||||
Maybe this is more applicable these days with more people learning and working from home since COVID-19?
|
||||
|
||||
Similar to helping mentees at a contribution sprint work towards their first commits to Drupal, it's great to be able to introduce new Developers to an open-source project and community such as Drupal, help develop their skills, and hopefully enable them to get the new job and career that they want.
|
20
website/src/pages/daily-emails/2022-09-28.md
Normal file
20
website/src/pages/daily-emails/2022-09-28.md
Normal file
|
@ -0,0 +1,20 @@
|
|||
---
|
||||
title: "Mob programming at PHP South Wales"
|
||||
pubDate: "2022-09-28"
|
||||
permalink: "archive/2022/09/28/mob-programming-php-south-wales"
|
||||
tags: []
|
||||
---
|
||||
|
||||
Tonight was our September meetup for the PHP South Wales user group, where I ran a hands-on session on mob programming.
|
||||
|
||||
I created [a small slide deck](https://speakerdeck.com/opdavies/an-introduction-to-mob-programming) before we started a mob session with the group.
|
||||
|
||||
We worked on the FizzBuzz kata in PHP, using Pest for our automated tests.
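
As a flavour of what we were writing, a first few Pest tests for the kata might look something like this - a sketch with a hypothetical `fizzbuzz()` function, not the group's actual code, which is linked below:

```php
<?php

// tests/FizzBuzzTest.php

test('numbers divisible by 3 return Fizz', function (): void {
    expect(fizzbuzz(3))->toBe('Fizz');
});

test('numbers divisible by 5 return Buzz', function (): void {
    expect(fizzbuzz(5))->toBe('Buzz');
});

test('numbers divisible by 3 and 5 return FizzBuzz', function (): void {
    expect(fizzbuzz(15))->toBe('FizzBuzz');
});
```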
|
||||
|
||||
We followed the Driver and Navigator model, with one person responsible for the typing and interpreting the instructions from the Navigators, and switched roles every ten minutes.
|
||||
|
||||
You can [see the code that we wrote](https://github.com/opdavies/code-katas/blob/1da5dd5a79bc7ca083c0c4216fc3b4b0854f623d/php/tests/FizzBuzzTest.php) on my code katas GitHub repository.
|
||||
|
||||
It was a fun experience and nice to code with some people who I hadn't coded with before.
|
||||
|
||||
We did some code kata sessions during our online meetups which also seemed to go well, so coding nights on katas or personal or open-source projects might be something that we do more of in the future.
|
83
website/src/pages/daily-emails/2022-09-30.md
Normal file
83
website/src/pages/daily-emails/2022-09-30.md
Normal file
|
@ -0,0 +1,83 @@
|
|||
---
|
||||
title: "Store Wars: different state management in Vue.js"
|
||||
pubDate: "2022-09-30"
|
||||
permalink: "archive/2022/09/30/store-wars-vuejs"
|
||||
tags: ["vue"]
|
||||
---
|
||||
|
||||
I'm currently working on a Vue.js application that I started building in Vue 2 before starting to use the Composition API, and then moved it to Vue 3.
|
||||
|
||||
In the original version, I was using Vuex for state management within the application, and interacting with Vuex directly within my Vue components - calling `getters` and `dispatch` to retrieve and update data.
|
||||
|
||||
As part of moving to Vue 3, I wanted to evaluate any new options, like Pinia which is now the default state management library for Vue.
|
||||
|
||||
But because I was integrating with Vuex directly, switching to an alternative would mean changing code within my components.
|
||||
|
||||
## Defining a Store interface
|
||||
|
||||
This is a situation that often occurs in back-end development - where you may need to switch to a different type of database or a different payment provider in an eCommerce application.
|
||||
|
||||
In that situation, you need a generic interface that can be used by different implementations. Because they have consistent methods, one implementation can be replaced with another, or multiple can be added at the same time. This is called the Strategy design pattern, and it relates to the open-closed principle in SOLID.
|
||||
|
||||
This is what I did by adding a `Store` interface:
|
||||
|
||||
```typescript
|
||||
export default interface Store {
|
||||
actions: {
|
||||
addRow(): void;
|
||||
init(): void;
|
||||
removeRow(index: number): void;
|
||||
};
|
||||
|
||||
state: {
|
||||
isLoading: boolean;
|
||||
selection: {
|
||||
items: [];
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
Any store that I want to work with needs to have these defined actions and state values, so I can use them within my components knowing that they will always be available.
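
For example, a component or composable can depend only on the interface - the file path below is an assumption for illustration:

```typescript
import type Store from "../types/Store";

// Whichever implementation is passed in, these actions and state values
// are guaranteed by the interface to exist.
export function useSelection(store: Store) {
  store.actions.init();

  return {
    isLoading: () => store.state.isLoading,
    items: () => store.state.selection.items,
  };
}
```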
|
||||
|
||||
## Creating a native Vue store
|
||||
|
||||
This is one implementation of the `Store` interface, using just Vue's `reactive` function from the Composition API:
|
||||
|
||||
```typescript
|
||||
let state = reactive({
|
||||
isLoading: false,
|
||||
selection: {
|
||||
items: [],
|
||||
},
|
||||
});
|
||||
|
||||
let actions = {
|
||||
addRow(): void {
|
||||
state.selection.items.push({
|
||||
// ...
|
||||
});
|
||||
},
|
||||
|
||||
init(): void {
|
||||
state.isLoading = true;
|
||||
|
||||
// ...
|
||||
},
|
||||
|
||||
removeRow(index: number): void {
|
||||
state.selection.items.splice(index, 1);
|
||||
},
|
||||
};
|
||||
|
||||
const vueStore: Store = {
|
||||
actions,
|
||||
state: readonly(state),
|
||||
};
|
||||
|
||||
export default vueStore;
|
||||
```
|
||||
|
||||
If I needed to add a Pinia version or another library, I could create another implementation that complies with the same interface.
|
||||
|
||||
Each implementation is responsible for any specifics of that library - extracting that logic from the component code and making it more flexible and reusable.
|
34
website/src/pages/daily-emails/2022-10-01.md
Normal file
34
website/src/pages/daily-emails/2022-10-01.md
Normal file
|
@ -0,0 +1,34 @@
|
|||
---
|
||||
title: Why do code katas?
|
||||
pubDate: "2022-10-01"
|
||||
permalink: archive/2022/10/01/code-katas
|
||||
tags: []
|
||||
---
|
||||
|
||||
## What are code katas?
|
||||
|
||||
Code katas are programming exercises which, like katas in martial arts, use practice and repetition to improve your skills.
|
||||
|
||||
Common katas are Fizzbuzz, the Bowling score calculator, and the Gilded Rose.
|
||||
|
||||
Each gives you the criteria of what the kata should do before it can be considered complete along with any specific information, and some websites will also give you a suite of failing tests to make pass - though I prefer to write my own and follow a test-driven development approach.
|
||||
|
||||
Once you have completed the solution and the criteria are satisfied, the kata is complete.
|
||||
|
||||
## Why I do code katas
|
||||
|
||||
As I said, doing code katas improves your skills by solving problems and identifying patterns that you may see when working in your project code.
|
||||
|
||||
Different katas focus on different patterns. For example, the Fibonacci Number kata focuses on recursion, whereas the Gilded Rose kata is all about refactoring complex legacy code.
|
||||
|
||||
Doing code katas keeps your skills sharp and gives you different perspectives as you work through different katas. You can then use and apply these within your main projects.
|
||||
|
||||
If you want to learn a new programming language then working on a kata that you've already solved in a language that you're familiar with allows you to focus on the syntax and features of the new language. I've been working on some code katas in TypeScript as I've been working with that recently, and would like to do some in Go.
|
||||
|
||||
If you work as part of a team or a part of a meetup, code katas can be worked on as a group and can introduce new skills like automated testing and test-driven development as well as providing some opportunities for team-building and socialising. If you're trying to introduce pair or mob programming, then working on code katas could be a good first step.
|
||||
|
||||
If you're just getting started with programming, working on code katas will help you learn the fundamentals and problem solving, but I'd also encourage you to put the code on GitHub and blog about each kata that you complete. Doing so will help and encourage others and also look good when applying for roles.
|
||||
|
||||
P.S. There are lists of code katas at https://github.com/gamontal/awesome-katas and https://codingdojo.org/kata, and online versions at https://www.codewars.com/join and https://exercism.org/tracks. There are many others - if you have a favourite, reply to this email and let me know.
|
||||
|
||||
I have [some GitHub repositories for my code kata solutions](https://github.com/opdavies?tab=repositories&q=katas) and will continue to build these as I do more.
|
32
website/src/pages/daily-emails/2022-10-02.md
Normal file
32
website/src/pages/daily-emails/2022-10-02.md
Normal file
|
@ -0,0 +1,32 @@
|
|||
---
|
||||
title: Minimum viable CI pipelines
|
||||
pubDate: "2022-10-02"
|
||||
permalink: archive/2022/10/02/minimum-viable-pipelines
|
||||
tags: []
|
||||
---
|
||||
|
||||
When I start a new project, and sometimes when I join an existing project, there are no CI (continuous integration) pipelines, no automated tests, and sometimes no local environment configuration.
|
||||
|
||||
In that case, where should you start when adding a CI pipeline?
|
||||
|
||||
I like to start with the simplest solution to get a passing build and to prove the concept - even if it's a "Hello, world" message. I know that the pipeline is configured correctly and runs when expected, and gives the output that I expect.
|
||||
|
||||
I like to use Docker for my development environments, partly because it's very easy to reuse the same setup within a CI pipeline just by running `docker image build` or `docker compose build`.
|
||||
|
||||
Having a task that ensures the project builds correctly is a great next step.
|
||||
|
||||
Within a Dockerfile, I run commands to validate my lock files, download and install dependencies from public and private repositories, and often apply patch files to third-party code. If a lock file is no longer in sync with its composer.json or package.json file, or a patch no longer applies, this would cause Docker and the CI pipeline to fail and the error can be caught and fixed within the pipeline.
|
||||
|
||||
Next, I'd look to run the automated tests. If there aren't any tests, I'd create an example test that will pass to prove the concept, and expect to see the number of tests grow as new features are added and as bugs are fixed.
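
Putting those first steps together, a minimal pipeline could look something like this - a sketch that assumes GitHub Actions and a `php` service defined in the Compose file:

```yaml
name: CI

on: [push]

jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      # Building the images validates lock files, installs dependencies
      # and applies patches - any failure fails the pipeline.
      - run: docker compose build

      # Run the test suite inside the built image.
      - run: docker compose run --rm php vendor/bin/phpunit
```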
|
||||
|
||||
The big reason to have automated tests running in a pipeline is that all the tests are run every time, ensuring that the test suite is always passing and preventing regressions across the codebase. If any test fails, the pipeline fails. This is known as continuous delivery - ensuring that code is always in a releasable state.
|
||||
|
||||
From there, I'd look to add additional tasks such as static analysis and code linting, as well as anything else to validate, build or deploy the code and grow confidence that a passing CI pipeline means that the code is releasable.
|
||||
|
||||
As more tasks are added to the pipeline, and the more of the code they cover (e.g. test coverage), the more it can be relied upon.
|
||||
|
||||
If there is a failure that wasn't caught in the CI pipeline, then the pipeline itself should be iterated on and improved.
|
||||
|
||||
Having a CI pipeline allows you to identify issues sooner and fix them quicker, encourages best practices like automated testing and test-driven development, and enables continuous deployment where code is automatically deployed after a passing build.
|
||||
|
||||
If you have a project without a CI pipeline, I'd encourage you to add one, to start small, and continuously iterate on it over time - adding tasks that are useful and valuable, and that build confidence that you can safely release when you need to.
|
75
website/src/pages/daily-emails/2022-10-03.md
Normal file
75
website/src/pages/daily-emails/2022-10-03.md
Normal file
|
@ -0,0 +1,75 @@
|
|||
---
|
||||
title: Refactoring to value objects
|
||||
pubDate: "2022-10-03"
|
||||
permalink: archive/2022/10/03/refactoring-value-objects
|
||||
tags: [php]
|
||||
---
|
||||
|
||||
|
||||
Here's a snippet of some Drupal code that I wrote last week. It's responsible for converting an array of nodes into a Collection of one of its field values.
|
||||
|
||||
```php
|
||||
return Collection::make($stationNodes)
|
||||
->map(fn (NodeInterface $station): string => $station->get('field_station_code')->getString())
|
||||
->values();
|
||||
```
|
||||
|
||||
There are two issues with this code.
|
||||
|
||||
First, whilst I'm implicitly saying that it accepts a certain type of node, because of the `NodeInterface` typehint this could accept any type of node. If that node doesn't have the required field, the code will error - but I'd like to know sooner if an incorrect type of node is passed and make it explicit that only a certain type of node can be used.
|
||||
|
||||
Second, the code for getting the field values is quite verbose and is potentially repeated in other places within the codebase. I'd like to have a simple way to access these field values that I can reuse anywhere else. If the logic for getting these particular field values changes, then I'd only need to change it in one place.
|
||||
|
||||
## Introducing a value object
|
||||
|
||||
This is the value object that I created.
|
||||
|
||||
It accepts the original node but checks to ensure that the node is the correct type. If not, an Exception is thrown.
|
||||
|
||||
I've added a helper method to get the field value, encapsulating that logic in a reusable function whilst making the code easier to read and its intent clearer.
|
||||
|
||||
```php
|
||||
namespace Drupal\mymodule\ValueObject;
|
||||
|
||||
use Drupal\node\NodeInterface;
|
||||
|
||||
final class Station implements StationInterface {
|
||||
|
||||
private NodeInterface $node;
|
||||
|
||||
private function __construct(NodeInterface $node) {
|
||||
if ($node->bundle() != 'station') {
|
||||
throw new \InvalidArgumentException();
|
||||
}
|
||||
|
||||
$this->node = $node;
|
||||
}
|
||||
|
||||
public function getStationCode(): string {
|
||||
return $this->node->get('field_station_code')->getString();
|
||||
}
|
||||
|
||||
public static function fromNode(NodeInterface $node): self {
|
||||
return new self($node);
|
||||
}
|
||||
|
||||
}
|
||||
```
|
||||
|
||||
## Refactoring to use the value object
|
||||
|
||||
This is what my code now looks like:
|
||||
|
||||
```php
|
||||
return Collection::make($stationNodes)
|
||||
->map(fn (NodeInterface $node): StationInterface => Station::fromNode($node))
|
||||
->map(fn (StationInterface $station): string => $station->getStationCode())
|
||||
->values();
|
||||
```
|
||||
|
||||
I've added an additional `map` to convert the nodes to the value object, but the second map can now use the new typehint - ensuring better type safety and also giving us auto-completion in IDEs and text editors. If an incorrect node type is passed in, then the Exception will be thrown and a much clearer error message will be shown.
|
||||
|
||||
Finally, I can use the helper method to get the field value, encapsulating the logic within the value object and making its intention clearer and easier to read.
|
21
website/src/pages/daily-emails/2022-10-08.md
Normal file
21
website/src/pages/daily-emails/2022-10-08.md
Normal file
|
@ -0,0 +1,21 @@
|
|||
---
|
||||
layout: ../../layouts/PageLayout.astro
|
||||
title: First impressions of Astro
|
||||
pubDate: "2022-10-08"
|
||||
permalink: archive/2022/10/08/first-impressions-astro
|
||||
tags: [astro]
|
||||
---
|
||||
|
||||
This week I attended another of Simon Vrachliotis' Pro Tailwind workshops.
|
||||
|
||||
The workshop again was great, teaching us about multi-style Tailwind components, such as a button that has props for variants like size, shape and impact, and how to create them in a flexible and maintainable way as well as making use of Headless UI.
|
||||
|
||||
For this workshop though, the examples and challenges used a tool that I wasn't familiar with - the Astro web framework.
|
||||
|
||||
I've seen a lot of blog posts and streams mentioning it but I hadn't tried it out for myself until the workshop.
|
||||
|
||||
What I find interesting is that it comes with a number of available integrations - from Tailwind CSS, to Vue, React, and Alpine.js - and you can use them all within the same project, or even on the same page. Installing an integration is as simple as `yarn astro add tailwindcss`.
|
||||
|
||||
The templates feel familiar and make use of front matter within Astro components, and regular YAML front matter works within Markdown files - which are supported out of the box.
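
For example, a small, hypothetical Astro component uses front matter for its logic, followed by a familiar HTML-like template:

```astro
---
// Runs at build time; the output is plain HTML.
const title = "Hello, Astro";
const integrations = ["Tailwind CSS", "Vue", "React", "Alpine.js"];
---

<h1>{title}</h1>

<ul>
  {integrations.map((integration) => <li>{integration}</li>)}
</ul>
```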
|
||||
|
||||
I've been thinking of redoing my personal website and evaluating options, but I think that Astro might be a new one to add to the list.
|
24
website/src/pages/daily-emails/monorepo-or-not.md
Normal file
24
website/src/pages/daily-emails/monorepo-or-not.md
Normal file
|
@ -0,0 +1,24 @@
|
|||
---
|
||||
title: "To monorepo, or not to monorepo?"
|
||||
permalink: "archive/2022/08/31/monorepo-or-not"
|
||||
pubDate: "2022-08-31"
|
||||
tags: ["git"]
|
||||
---
|
||||
|
||||
I listened to a podcast episode recently which talked about monorepos - i.e. code repositories that contain multiple project codebases rather than a single repository for each codebase - and this got me thinking about whether I should be using these more.
|
||||
|
||||
It's something that I've been trialling recently in my [Docker examples](https://github.com/opdavies/docker-examples) and [Docker images](https://github.com/OliverDaviesLtd/docker-images) repositories, where one repository contains and builds multiple Docker images.
|
||||
|
||||
I'm not suggesting that I put all of my client projects into one repository, but at least combining the different parts of the same project into the same repository.
|
||||
|
||||
For example, I'm working for one client on their current Drupal 7 websites whilst developing the new Drupal 9 versions, which are currently in two separate repositories. I'm also developing an embeddable Vue.js application as part of the Drupal 9 website, and using Fractal as a component library. These are also in their own repositories.
|
||||
|
||||
Using a monorepo approach, all of these projects would be in the same repository.
|
||||
|
||||
I can see advantages to being able to see cross-project changes in the same place - such as an API change in Drupal that needs an update to be made in Vue.js, or vice-versa - rather than needing to look at separate repositories. This could also make versioning easier as everything will be stored and tagged inside the same repository.
|
||||
|
||||
Each project has its own CI pipeline, so it would require some changes where I set a specific pipeline to run only when files within its directory are changed.
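
For example, with GitHub Actions - an assumption here, though other CI tools have similar options - a workflow can be limited to one project's directory using a `paths` filter:

```yaml
name: Drupal

on:
  push:
    paths:
      # Only run when files in this project's directory change
      # (the directory name is illustrative).
      - "drupal/**"

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - run: docker compose build
```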
|
||||
|
||||
I can see how deployments may be trickier if I need to push an update within a directory to another Git repository, which makes me wonder if I'll need to look into using subtree splits to create separate deployment repositories - similar to how the Symfony project has one main repository and each component split into its own repository.
|
||||
|
||||
I'll keep trialling it in my open-source projects and maybe test it with some client projects, but if you have experience with monorepos that you'd like to share, then please reply to this email - I'd love to hear about it.
|
10
website/src/pages/daily.mdx
Normal file
10
website/src/pages/daily.mdx
Normal file
|
@ -0,0 +1,10 @@
|
|||
---
|
||||
layout: ../layouts/PageLayout.astro
|
||||
title: Oliver's Daily List
|
||||
---
|
||||
|
||||
import DailyEmailForm from '../components/DailyEmailForm.astro';
|
||||
|
||||
A daily newsletter on software development, DevOps, community, and open-source.
|
||||
|
||||
<DailyEmailForm />
|
39
website/src/pages/drupal-consulting.mdx
Normal file
39
website/src/pages/drupal-consulting.mdx
Normal file
|
@ -0,0 +1,39 @@
|
|||
---
|
||||
layout: ../layouts/PageLayout.astro
|
||||
title: Oliver Davies - PHP Developer and Drupal Specialist
|
||||
---
|
||||
|
||||
export const email = "oliver@oliverdavies.uk"
|
||||
|
||||
I'm a long-time Web Developer and consultant. I’ve led, delivered, and maintained PHP, Drupal, and Drupal Commerce based websites, have worked for some of the UK’s largest and best-known PHP and Drupal agencies, and even for the Drupal Association - the nonprofit organisation behind the Drupal project - where I was employed to work on and improve the Drupal.org websites.
|
||||
|
||||
<a href={`mailto:${email}`}>Send me an email</a> to discuss your project.
|
||||
|
||||
## My Drupal Experience
|
||||
|
||||
I have contributed code to Drupal core and to various other Drupal modules, and maintain modules and themes like Override Node Options which is used on over 30,000 Drupal sites according to Drupal.org. I’ve been a mentor at various in-person events, helping new contributors to the Drupal project, and regularly write blog posts, present talks and workshops, and create videos and live streams.
|
||||
|
||||
As well as Drupal, I’ve worked with other PHP projects like Symfony and Laravel, static site generators like Sculpin and Jekyll, and JavaScript frameworks such as Vue.js and Angular.
|
||||
|
||||
## Certifications
|
||||
|
||||
- Acquia certified Developer - Drupal 8 (2017)
|
||||
- Acquia certified Back-End Specialist - Drupal 8 (2017)
|
||||
- Acquia certified Front-End Specialist - Drupal 8 (2017)
|
||||
- Acquia certified Cloud Pro (2018)
|
||||
- Platform.sh Gold partner certification (2021, for Inviqa)
|
||||
|
||||
## Community contributions
|
||||
|
||||
- Authored an article on Drupal development using distributions for Linux Journal's Drupal issue.
|
||||
- Mentored new contributors at DrupalCon contribution days with their first patches to Drupal core.
|
||||
- Organised the Drupal Bristol and PHP South West (PHPSW) user groups, and the DrupalCamp Bristol conference.
|
||||
- Currently organise and sponsor the [PHP South Wales user group](https://www.phpsouthwales.uk).
|
||||
- Board member for the [Drupal England and Wales Association](https://drupal-england-wales.github.io) (2020 to present).
|
||||
- Selecting sessions for DrupalCon Europe 2021 as part of the DrupalCon track team.
|
||||
- Mentored students on the DrupalEasy [Drupal Career Online](https://www.drupaleasy.com/academy/dco/course-information) course.
|
||||
- Currently writing "Test-Driven Drupal", an eBook about automated testing and test-driven development in Drupal.
|
||||
|
||||
## Podcasts
|
||||
|
||||
I've been a guest on a number of podcasts, including [Talking Drupal](https://talkingdrupal.com), [How to Code Well](https://howtocodewell.fm), [That Podcast](https://thatpodcast.io), and [Voices of the ElePHPant](https://voicesoftheelephpant.com), where I've discussed topics including PHP, Drupal, CSS frameworks, and automated testing.
|
76
website/src/pages/drupal-testing.mdx
Normal file
76
website/src/pages/drupal-testing.mdx
Normal file
|
@ -0,0 +1,76 @@
|
|||
---
|
||||
layout: ../layouts/PageLayout.astro
|
||||
title: Introduction to Automated Testing and Test-Driven Development with Drupal
|
||||
---
|
||||
|
||||
import { chain as _ } from 'lodash'
|
||||
|
||||
export const drupalVersions = '9 and 10'
|
||||
|
||||
export const prices = {
|
||||
early: "395.00",
|
||||
full: "495.00",
|
||||
}
|
||||
|
||||
export const isEarlyBird = true
|
||||
|
||||
export const nextDate = '2022-04-04'
|
||||
|
||||
export const testimonials = [
|
||||
{
|
||||
name: 'Scott Euser, Head of Web Development',
|
||||
image: '/images/scott-euser.jpg',
|
||||
text: 'Oliver really knows his stuff. Whether you are just starting out or looking to take your knowledge to the next level, his patient and clear way of explaining will help get you there.',
|
||||
},
|
||||
]
|
||||
|
||||
Are you a Drupal Developer who wants to learn about automated testing and test-driven development, or do you manage a development team that you'd like to train?
|
||||
|
||||
I've delivered large Drupal projects using automated tests and test-driven development for custom functionality, and maintain Drupal modules with thousands of installations whilst using their tests to ensure working code and prevent regressions.
|
||||
|
||||
I offer an interactive full-day workshop (previously presented at DrupalCamp London, and remotely for DrupalCamp NYC) that provides an introduction to automated testing in Drupal and how to utilise test-driven development - which I've updated specifically for Drupal {drupalVersions}.
|
||||
|
||||
## Contents
|
||||
|
||||
* What is automated testing, and why write tests?
|
||||
* What types of tests are available in Drupal?
|
||||
* Outside-in vs. inside-out testing.
|
||||
* Configuring Drupal and PHPUnit to run tests locally.
|
||||
* Exercise: writing tests for existing Drupal core functionality.
|
||||
* Exercise: adding tests to an existing custom module.
|
||||
* What is test-driven development?
|
||||
* Exercise: writing a new Drupal module from scratch with test-driven development.
|
||||
* Q&A
|
||||
|
||||
<hr />
|
||||
|
||||
## Dates and prices
|
||||
|
||||
The workshop is currently only available remotely, and the next available date is <span class="font-bold">{new Date(nextDate).toLocaleDateString('en-GB', { day: 'numeric', month: 'long', year: 'numeric', })}</span>.
|
||||
|
||||
Seats are available at <span class="font-bold">{isEarlyBird ? `an early bird price of £${prices.early}` : `a price of £${prices.full}`}</span>, with a 10% discount for bulk orders of 5 or more seats.
|
||||
|
||||
<div class="mt-6">
|
||||
<a class="inline-flex items-center px-6 py-3 font-medium rounded-md bg-blue-primary text-white no-underline hover:bg-white hover:text-blue-primary focus:bg-white focus:text-blue-primary transition-color ease-in-out duration-200 text-base" href="https://buy.stripe.com/6oE3cW4Su7DA1t6144">
|
||||
Book your seat →
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<hr />
|
||||
|
||||
## Testimonials
|
||||
|
||||
{testimonials.map(testimonial => (
|
||||
<div>
|
||||
<blockquote class="mt-4">
|
||||
{testimonial.text}
|
||||
</blockquote>
|
||||
|
||||
<footer class="flex items-center space-x-4 space-x-reverse">
|
||||
<span class="text-base">{testimonial.name}</span>
|
||||
<span class="order-first">
|
||||
<img width="40" height="40" class="w-10 h-10 rounded-full border" src={testimonial.image} />
|
||||
</span>
|
||||
</footer>
|
||||
</div>
|
||||
))}
|
23
website/src/pages/index.md
Normal file
23
website/src/pages/index.md
Normal file
|
@ -0,0 +1,23 @@
|
|||
---
|
||||
layout: ../layouts/PageLayout.astro
|
||||
title: Oliver Davies - Software Developer and Consultant, PHP and Drupal specialist
|
||||
---
|
||||
|
||||
<div class="mb-4 w-32">
|
||||
<img src="/images/social-avatar.jpg" alt="Picture of Oliver" class="rounded-full border border-gray">
|
||||
</div>
|
||||
|
||||
Hi, I’m Oliver. I’m a Full Stack Software Consultant based in South Wales in the UK.
|
||||
|
||||
I architect, develop, and consult on large web applications, and work with organisations, agencies, and freelance Developers to improve their code quality by using tools and workflows such as continuous integration and deployment, automated testing, test-driven development, and static analysis.
|
||||
|
||||
I have years of software development and Drupal experience, have worked for the Drupal Association, and am an <a href="https://certification.acquia.com/user/4540">Acquia-certified Drupal expert</a>. I also work with complementary technologies such as Symfony, Vue.js, TypeScript, Docker, and Ansible.
|
||||
|
||||
I enjoy writing and contributing open-source code which you can find on my [Drupal.org] and [GitHub] profiles.
|
||||
|
||||
I regularly <a href="/talks">present talks and workshops</a> at user groups and conferences and am the organiser of the <a href="https://www.phpsouthwales.uk">PHP South Wales</a> user group.
|
||||
|
||||
<a href="/contact">Contact me</a> if you’d like any more information or to discuss a project.
|
||||
|
||||
[drupal.org]: https://drupal.org/u/opdavies
|
||||
[github]: https://github.com/opdavies
|
29
website/src/pages/links.mdx
Normal file
29
website/src/pages/links.mdx
Normal file
|
@ -0,0 +1,29 @@
|
|||
---
|
||||
layout: ../layouts/PageLayout.astro
|
||||
title: Links
|
||||
---
|
||||
|
||||
export const links = [
|
||||
{ title: 'My daily email list', url: '/daily' },
|
||||
{ title: 'Twitter', url: 'https://twitter.com/opdavies' },
|
||||
{ title: 'YouTube', url: 'https://www.youtube.com/channel/UCkeK0qF9HHUPQH_fvn4ghqQ' },
|
||||
{ title: 'LinkedIn', url: 'https://www.linkedin.com/in/opdavies' },
|
||||
{ title: 'Drupal.org', url: 'https://www.drupal.org/u/opdavies' },
|
||||
{ title: 'GitHub', url: 'https://github.com/opdavies' },
|
||||
{ title: 'GitHub Gists', url: 'https://gist.github.com/opdavies' },
|
||||
{ title: 'Packagist', url: 'https://packagist.org/packages/opdavies' },
|
||||
{ title: 'Speakerdeck', url: 'https://speakerdeck.com/opdavies' },
|
||||
{ title: 'PHP South Wales', url: 'https://www.phpsouthwales.uk' },
|
||||
];
|
||||
|
||||
<div class="mx-auto max-w-md">
|
||||
<ul class="p-0 space-y-4 list-none">
|
||||
{links && links.map(link => (
|
||||
<li>
|
||||
<a class="block p-2 w-full text-center text-black no-underline border transition duration-200 ease-in-out dark:text-white hover:text-white focus:text-white dark:hover:text-black dark:focus:text-black dark:hover:bg-white dark:focus:bg-white hover:bg-blue-primary focus:bg-blue-primary" href={link.url}>
|
||||
{link.title}
|
||||
</a>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
19
website/src/pages/pair.mdx
Normal file
19
website/src/pages/pair.mdx
Normal file
|
@ -0,0 +1,19 @@
|
|||
---
|
||||
layout: ../layouts/PageLayout.astro
|
||||
title: Pair program with me
|
||||
---
|
||||
|
||||
I enjoy pair and mob (group) programming, so as well as [traditional freelance
|
||||
services][0], I offer paid remote pair programming sessions where I'll work
|
||||
with you on your own project via a Zoom call.
|
||||
|
||||
My experience is based around PHP, Drupal, Symfony, Vue.js, Tailwind CSS,
|
||||
Ansible, Docker, clean code, automated testing, and test-driven development.
|
||||
|
||||
I also offer free sessions for open source projects.
|
||||
|
||||
To arrange a pairing session, [find an available time on my calendar][1].
|
||||
|
||||
[0]: /drupal-php-developer
|
||||
[1]: https://savvycal.com/opdavies/pairing
|
||||
|
17
website/src/pages/search.astro
Normal file
17
website/src/pages/search.astro
Normal file
|
@ -0,0 +1,17 @@
|
|||
---
|
||||
---
|
||||
|
||||
<script>
|
||||
import Alpine from 'alpinejs';
|
||||
|
||||
window.Alpine = Alpine;
|
||||
Alpine.start();
|
||||
</script>
|
||||
|
||||
<div x-data="{ term: '' }">
|
||||
<form :action="`https://www.google.com/search?q=site%3Aoliverdavies.uk+%22` + term + `%22`">
|
||||
|
||||
<input type="text" x-model="term" />
|
||||
<button>Search</button>
|
||||
</form>
|
||||
</div>
|
34
website/src/pages/speaker-information.mdx
Normal file
34
website/src/pages/speaker-information.mdx
Normal file
|
@ -0,0 +1,34 @@
|
|||
---
|
||||
layout: ../layouts/PageLayout.astro
|
||||
title: Speaker Information
|
||||
---
|
||||
|
||||
## Bio
|
||||
|
||||
<a href="https://www.oliverdavies.uk">Oliver Davies</a> (<a href="https://twitter.com/opdavies">@opdavies</a>) has been building websites since 2007, and speaking at meetups and conferences since 2012. He is a Full Stack Developer and a certified Drupal expert who also has experience developing with Symfony, Laravel, Sculpin and Vue.js, as well as with DevOps and systems administration.
|
||||
|
||||
He is a Lead Software Developer at <a href="https://tfw.wales/?%20utm_source=oliverdavies.uk&utm_medium=speaker-information">Transport for Wales</a>, a Drupal core contributor and mentor, and an open source and contribution advocate.
|
||||
|
||||
He regularly blogs and gives talks on various topics, maintains and contributes to various open source projects, and organises the PHP South Wales user group.
|
||||
|
||||
## Photos
|
||||
|
||||
- https://www.dropbox.com/s/say1muiqedik0l4/0188395_thumb.jpg
|
||||
|
||||
## Some events that I’ve spoken at
|
||||
|
||||
- BlueConf 2019 (Cardiff, UK)
|
||||
- DrupalCamp Brighton 2015
|
||||
- DrupalCamp Bristol 2016
|
||||
- DrupalCamp Dublin 2017
|
||||
- DrupalCamp London (2014, 2015, 2016, 2017, 2019, 2020)
|
||||
- DrupalCamp North 2015 (Sunderland, UK)
|
||||
- DrupalCon Amsterdam 2019
|
||||
- DrupalCon Europe 2020 (Online)
|
||||
- Nomad PHP
|
||||
- PHP North West 2017 (Manchester, UK - 10 year anniversary)
|
||||
- PHP South Coast 2016 (Portsmouth, UK)
|
||||
- PHP UK Conference 2018 (London, UK)
|
||||
- WordCamp Bristol 2019
|
||||
|
||||
I also [gave a number of talks remotely](/blog/speaking-remotely-during-lockdown) for various user groups and conferences during COVID-19.
|
61
website/src/pages/talks/[slug].astro
Normal file
61
website/src/pages/talks/[slug].astro
Normal file
|
@ -0,0 +1,61 @@
|
|||
---
|
||||
import Layout from '../../layouts/Layout.astro'
|
||||
|
||||
export async function getStaticPaths() {
|
||||
const talks = await Astro.glob('../../talks/*.md')
|
||||
|
||||
return talks.map(talk => {
|
||||
const parts = talk.file.replace('.md', '').split('/')
|
||||
const slug = parts[parts.length - 1]
|
||||
|
||||
return {
|
||||
params: { slug },
|
||||
props: { talk }
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const { Content } = Astro.props.talk
|
||||
const { title, speakerdeck, video } = Astro.props.talk.frontmatter
|
||||
---
|
||||
|
||||
<Layout title={title}>
|
||||
<div class="space-y-6">
|
||||
<div class="markdown">
|
||||
<Content />
|
||||
</div>
|
||||
|
||||
{speakerdeck && speakerdeck.id && (
|
||||
<div>
|
||||
<h2 class="mb-2">Slides</h2>
|
||||
|
||||
<div class="slides">
|
||||
<noscript>**Please enable JavaScript to view slides.**</noscript>
|
||||
<script
|
||||
class="speakerdeck-embed"
|
||||
data-id={speakerdeck.id}
|
||||
data-ratio={speakerdeck.ratio ?? '1.29456384323641'}
|
||||
src="//speakerdeck.com/assets/embed.js"
|
||||
></script>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{video && video.type == "youtube" && (
|
||||
<div>
|
||||
<h2 class="mb-2">Video</h2>
|
||||
|
||||
<div class="video-full">
|
||||
<iframe
|
||||
width="678"
|
||||
height="408"
|
||||
src={`https://www.youtube.com/embed/${video.id}?rel=0&iv_load_policy=3`}
|
||||
frameborder="0"
|
||||
allowfullscreen
|
||||
>
|
||||
</iframe>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</Layout>
|
38
website/src/pages/talks/index.astro
Normal file
38
website/src/pages/talks/index.astro
Normal file
|
@ -0,0 +1,38 @@
|
|||
---
|
||||
import PageLayout from '../../layouts/PageLayout.astro'
|
||||
|
||||
const talks = await Astro.glob("../../talks/*.md")
|
||||
|
||||
const sortedTalks = talks
|
||||
.map(talk => {
|
||||
const parts = talk.file.replace('.md', '').split('/')
|
||||
const slug = parts[parts.length - 1]
|
||||
|
||||
return { slug, talk }
|
||||
})
|
||||
.sort((b, a) => {
|
||||
const events = [
|
||||
a.talk.frontmatter.events[a.talk.frontmatter.events.length - 1],
|
||||
b.talk.frontmatter.events[b.talk.frontmatter.events.length - 1],
|
||||
]
|
||||
|
||||
return new Date(events[0].date).valueOf() -
|
||||
new Date(events[1].date).valueOf()
|
||||
})
|
||||
---
|
||||
|
||||
<PageLayout title="Talks and workshops">
|
||||
<p>Starting with my first talk in September 2012, I have given 85 presentations and workshops at various conferences and meetups, in-person and remotely, on topics including PHP, Drupal, automated testing, Git, CSS, and systems administration.</p>
|
||||
|
||||
<div>
|
||||
{sortedTalks.map((talk) => (
|
||||
<article>
|
||||
<a href=`/talks/${talk.slug}`>
|
||||
<h2>{talk.talk.frontmatter.title}</h2>
|
||||
</a>
|
||||
|
||||
{talk.talk.frontmatter.description}
|
||||
</article>
|
||||
))}
|
||||
</div>
|
||||
</PageLayout>
|
60
website/src/pages/things-about-php.mdx
Normal file
60
website/src/pages/things-about-php.mdx
Normal file
|
@ -0,0 +1,60 @@
|
|||
---
|
||||
layout: ../layouts/PageLayout.astro
|
||||
title: Things you should know about PHP
|
||||
---
|
||||
|
||||
export const email = "oliver@oliverdavies.uk"
|
||||
|
||||
Thanks for attending my [Things you should know about PHP](/talks/things-you-should-know-about-php) talk.
|
||||
|
||||
I hope that you learned some things about PHP, its ecosystem, and its communities, and if you haven't tried using PHP yet, I'd encourage you to do so.
|
||||
|
||||
Here are links to the resources that I mention in the talk, plus a couple of extras.
|
||||
|
||||
## Resources
|
||||
|
||||
- [Composer](https://getcomposer.org) - dependency manager
|
||||
- [Drupal](https://www.drupal.org) - content management system
|
||||
- [Jigsaw](https://jigsaw.tighten.co) - static site generator
|
||||
- [Laravel](https://laravel.com) - framework
|
||||
- [Nomad PHP](https://nomadphp.com) - online user group
|
||||
- [PHP official images on Docker Hub](https://hub.docker.com/_/php)
|
||||
- [PHPStan](https://phpstan.org) - static analysis tool
|
||||
- [PHPUnit](https://phpunit.de) - testing framework
|
||||
- [Pest](https://pestphp.com) - testing framework
|
||||
- [Psalm](https://psalm.dev) - static analysis tool
|
||||
- [Sculpin](https://sculpin.io) - static site generator
|
||||
- [WordPress](https://wordpress.org) - content management system
|
||||
- [php.net](https://www.php.net) - online documentation
|
||||
- [php[architect]](https://www.phparch.com) - online magazine
|
||||
|
||||
## Books
|
||||
|
||||
- [Laravel: Up & Running](https://www.oreilly.com/library/view/laravel-up/9781492041207)
|
||||
- [Symfony: The Fast Track](https://symfony.com/book)
|
||||
|
||||
## Videos
|
||||
|
||||
- [Codecourse](https://codecourse.com)
|
||||
- [How to Code Well](https://www.howtocodewell.net)
|
||||
- [Laracasts](https://laracasts.com)
|
||||
- [SymfonyCasts](https://symfonycasts.com)
|
||||
|
||||
## Podcasts
|
||||
|
||||
- [How to Code Well podcast](https://howtocodewell.fm)
|
||||
- [PHPUgly](https://www.phpugly.com)
|
||||
- [Talking Drupal](https://talkingdrupal.com)
|
||||
- [The Laravel Podcast](https://laravelpodcast.com)
|
||||
- [The PHP Roundtable](https://phproundtable.com)
|
||||
- [Voices of the elePHPant](https://voicesoftheelephpant.com)
|
||||
|
||||
## Can I help?
|
||||
|
||||
Do you want to introduce PHP to your company or team, or add one of these tools to your existing PHP application?
|
||||
|
||||
I offer consulting calls and services to reduce your onboarding time and get you up and running quicker and easier.
|
||||
|
||||
<div class="mt-6">
|
||||
<a class="inline-flex items-center px-6 py-3 font-medium rounded-md bg-blue-primary text-white no-underline hover:bg-white hover:text-blue-primary focus:bg-white focus:text-blue-primary transition-color ease-in-out duration-200 text-base" href={`mailto:${email}?subject=Book in my call`}>Book in your call →</a>
|
||||
</div>
|
29
website/src/posts/10-years-working-full-time-drupal-php.md
Normal file
29
website/src/posts/10-years-working-full-time-drupal-php.md
Normal file
|
@ -0,0 +1,29 @@
|
|||
---
|
||||
title: 10 years working full time with Drupal and PHP
|
||||
excerpt: 10 years ago today, I started working for Horse & Country TV in what was my first full-time Drupal development role.
|
||||
tags:
|
||||
- drupal
|
||||
- personal
|
||||
- php
|
||||
date: 2020-07-19
|
||||
---
|
||||
|
||||
<blockquote class="twitter-tweet"><p lang="en" dir="ltr">10 years ago today, I started my first full-time Web Developer job, working for <a href="https://twitter.com/HorseAndCountry?ref_src=twsrc%5Etfw">@HorseAndCountry</a> on their (at the time) <a href="https://twitter.com/hashtag/Drupal?src=hash&ref_src=twsrc%5Etfw">#Drupal</a> 6 website.</p>— Oliver Davies (@opdavies) <a href="https://twitter.com/opdavies/status/1284744784037335040?ref_src=twsrc%5Etfw">July 19, 2020</a></blockquote>
|
||||
|
||||
10 years ago today, I started working for [Horse & Country TV](https://horseandcountry.tv) in what was my first full-time Drupal development role.
|
||||
|
||||
I'd been learning and working with Drupal for a couple of years prior to this, working on some personal and freelance projects, but when I was looking to move back to this area of Wales, this job on my doorstep was ideal.
|
||||
|
||||
Initially starting as the sole Developer before another started a few months later, I remember being very excited to see and learn how this site had been built. Some of the main things that I remember working on were re-developing the Events section and adding paid events with [Ubercart](https://www.drupal.org/project/ubercart), and expanding my module development knowledge by adding a custom block that programmatically showed the current and next programme on the channel.
|
||||
|
||||
As well as working with Drupal itself, it was a great opportunity to get more hands-on experience with Linux servers and to learn new tools such as [Git](https://git-scm.com) for version control.
|
||||
|
||||
I also remember being asked to contribute to a public issue on Drupal.org as part of the interview process to demonstrate my debugging abilities. I decided to look at [this Drupal 6 issue](https://www.drupal.org/node/753898), and posted a comment with some updated code that I then forwarded on, and then uploaded a patch to the issue queue. This is still one of my favourite approaches for interviews, and one that I've used myself since when interviewing people for roles that use open source technologies. I much prefer this to working on internal, company specific coding tests, as it gives the interviewee some real world experience and exposure to the project itself and its community, rather than just how to _use_ it.
|
||||
|
||||
Posting on a Drupal core issue and submitting patches was a bit scary at the time, but I think paved the way for me later contributing to core and other Drupal and open source projects. In fact, I was a Contribution Day mentor at DrupalCon Los Angeles in 2015 and helped someone get _their_ first commit to core when [a fix was committed to Drupal 8](https://git.drupalcode.org/project/drupal/commit/9cdd22c).
|
||||
|
||||
After this role, I've worked for various agencies working primarily with Drupal and PHP, as well as for the [Drupal Association](https://www.drupal.org/assocation) itself. Whilst in recent years I've also started working with other frameworks like Symfony and Vue.js, Drupal and PHP have always been my core specialism.
|
||||
|
||||
I've been very excited by the developments in both PHP and Drupal in recent versions, and I'm looking forward to the next 10 years working with them.
|
||||
|
||||
Thank you Horse & Country for giving me the chance to start on my full-time Drupal journey!
|
84
website/src/posts/2014.md
Normal file
84
website/src/posts/2014.md
Normal file
|
@ -0,0 +1,84 @@
|
|||
---
|
||||
title: 2014
|
||||
date: 2015-03-20
|
||||
excerpt: A look back at 2014.
|
||||
tags:
|
||||
- drupal-association
|
||||
- drupalcamp-london
|
||||
- personal
|
||||
tweets: true
|
||||
---
|
||||
|
||||
A lot happened in 2014. Here are some of the main things that I'd like to
|
||||
highlight.
|
||||
|
||||
## Joined the Drupal Association
|
||||
|
||||
This was the main thing for me this year, in May I left
|
||||
[Precedent](http://precedent.com) and joined the
|
||||
[Drupal Association](https://assoc.drupal.org). I work on the Engineering team,
|
||||
focused mainly on [Drupal.org](https://www.drupal.org) but I've also done some
|
||||
theming work on the DrupalCon [Amsterdam](http://amsterdam2014.drupal.org) and
|
||||
[Latin America](http://latinamerica2015.drupal.org) websites, and some
|
||||
pre-launch work on [Drupal Jobs](https://jobs.drupal.org).
|
||||
|
||||
Some of the tasks that I've worked on so far are:
|
||||
|
||||
- Fixing remaining issues from the Drupal.org Drupal 7 upgrade.
|
||||
- Improving pages for
|
||||
[Supporting Partners](https://www.drupal.org/supporters/partners),
|
||||
[Technology Supporters](https://www.drupal.org/supporters/technology) and
|
||||
[Hosting Partners](https://www.drupal.org/supporters/hosting). These
|
||||
previously were manually updated pages using HTML tables, which are now
|
||||
dynamic pages built with [Views](https://www.drupal.org/project/views) using
|
||||
organisation nodes.
|
||||
- Configuring human-readable paths for user profiles using
|
||||
[Pathauto](https://www.drupal.org/project/pathauto). Only a small change, but
|
||||
made a big difference to end-users.
|
||||
- Migration of user data from profile values to fields, and various user profile
|
||||
improvements. This was great because now we can do things like reference
|
||||
mentors by their username and display their picture on your profile, as well
|
||||
as show lists of people listing a user as their mentor. This, I think, adds a
|
||||
more personal element to Drupal.org because we can see the actual people and
|
||||
not just a list of names on a page.
|
||||
|
||||
I've started keeping a list of tasks that I've been involved with on my
|
||||
[Work](/work/) page, and will be adding more things as I work on them.
|
||||
|
||||
### Portland
|
||||
|
||||
I was able to travel to Portland, Oregon twice last year to meet with the rest
|
||||
of the Association staff. Both times I met new people and it was great to spend
|
||||
some work and social time with everyone, and to have everyone
|
||||
together as a team.
|
||||
|
||||
## My First DrupalCamp
|
||||
|
||||
In February, I attended [DrupalCamp London](http://2014.drupalcamplondon.co.uk).
|
||||
This was my first time attending a Camp, and I managed to attend some great
|
||||
sessions as well as meet people who I'd never previously met in person. I was
|
||||
also a volunteer and speaker, where I talked about
|
||||
[Git Flow](/blog/what-git-flow/) - a workflow for managing your Git projects.
|
||||
|
||||
{% include 'tweet' with {
|
||||
content: '<p>Great presentation by <a href="https://twitter.com/opdavies">@opdavies</a> on git flow at <a href="https://twitter.com/search?q=%23dclondon&src=hash">#dclondon</a> very well prepared and presented. <a href="http://t.co/tDINp2Nsbn">pic.twitter.com/tDINp2Nsbn</a></p>— Greg Franklin (@gfranklin) <a href="https://twitter.com/gfranklin/statuses/440104311276969984">March 2, 2014</a>'
|
||||
} %}
|
||||
|
||||
I was also able to do a little bit of sprinting whilst I was there, reviewing
|
||||
other people's modules and patches.
|
||||
|
||||
Attending this and [DrupalCon Prague](https://prague2013.drupal.org) in 2013
|
||||
have really opened my eyes to the face-to-face side of the Drupal community, and
|
||||
I plan on attending a lot more Camps and Cons in the future.
|
||||
|
||||
## DrupalCon Amsterdam
|
||||
|
||||
I was also able to travel to Holland and attend
|
||||
[DrupalCon Amsterdam](https://amsterdam2014.drupal.org) along with other members
|
||||
of Association staff.
|
||||
|
||||
## DrupalCamp Bristol
|
||||
|
||||
In October, we started planning for
|
||||
[DrupalCamp Bristol](http://www.drupalcampbristol.co.uk). I'm one of the
|
||||
founding Committee members,
|
30
website/src/posts/accessible-bristol-site.md
Normal file
30
website/src/posts/accessible-bristol-site.md
Normal file
|
@ -0,0 +1,30 @@
|
|||
---
|
||||
title: Accessible Bristol site launched
|
||||
date: 2012-11-15
|
||||
excerpt:
|
||||
I'm happy to report that the Accessible Bristol website was launched this week, on
|
||||
Drupal 7.
|
||||
tags:
|
||||
- accessibility
|
||||
- accessible-bristol
|
||||
- nomensa
|
||||
---
|
||||
|
||||
I'm happy to announce that the
|
||||
[Accessible Bristol](http://www.accessiblebristol.org.uk) website was launched
|
||||
this week, on Drupal 7. The site has been developed over the past few months,
|
||||
and uses the [User Relationships](http://drupal.org/project/user_relationships)
|
||||
and [Privatemsg](http://drupal.org/project/privatemsg) modules to provide a
|
||||
community-based platform where people with an interest in accessibility can
|
||||
register and network with each other.
|
||||
|
||||
The group is hosting a launch event on the 28th November at the Council House,
|
||||
College Green, Bristol. Interested? More information is available at
|
||||
<http://www.accessiblebristol.org.uk/events/accessible-bristol-launch> or go to
|
||||
<http://buytickets.at/accessiblebristol/6434> to register.
|
|
@ -0,0 +1,79 @@
|
|||
---
|
||||
title: Add a Taxonomy Term to Multiple Nodes Using SQL
|
||||
date: 2010-07-07
|
||||
excerpt: How to add a new taxonomy term to multiple nodes in Drupal using SQL.
|
||||
tags:
|
||||
- database
|
||||
- drupal-6
|
||||
- drupal-planet
|
||||
- sequel-pro
|
||||
- sql
|
||||
- taxonomy
|
||||
---
|
||||
|
||||
In preparation for my Blog posts being added to
|
||||
[Drupal Planet](http://drupal.org/planet), I needed to create a new Taxonomy
|
||||
term (or, in this case, tag) called 'Drupal Planet', and assign it to new
|
||||
content to be imported into their aggregator. After taking a quick look through my
|
||||
previous posts, I decided that 14 of my previous posts were relevant, and
|
||||
thought that it would be useful to also assign these the 'Drupal Planet' tag.
|
||||
|
||||
I didn't want to manually open each post and add the new tag, so I decided to
|
||||
make the changes myself directly in the database using SQL, as a follow-up
|
||||
to a previous post -
|
||||
[Quickly Change the Content Type of Multiple Nodes using SQL](/blog/change-content-type-multiple-nodes-using-sql/).
|
||||
|
||||
**Again, before changing any values within the database, ensure that you have an
|
||||
up-to-date backup which you can restore if you encounter a problem!**
|
||||
|
||||
The first thing I did was create the 'Drupal Planet' term in my Tags vocabulary.
|
||||
I decided to do this via the administration area of my site, and not via the
|
||||
database. Then, using [Sequel Pro](http://www.sequelpro.com), I ran the
|
||||
following SQL query to give me a list of Blog posts on my site - showing just
|
||||
their titles and nid values.
|
||||
|
||||
```language-sql
|
||||
SELECT title, nid FROM node WHERE TYPE = 'blog' ORDER BY title ASC;
|
||||
```
|
||||
|
||||
I made a note of the nids of the returned nodes, and kept them for later. I
|
||||
then ran a similar query against the term_data table. This returned a list of
|
||||
Taxonomy terms - showing each term's name and its unique tid value.
|
||||
|
||||
```language-sql
|
||||
SELECT NAME, tid FROM term_data ORDER BY NAME ASC;
|
||||
```
|
||||
|
||||
The term that I was interested in, Drupal Planet, had the tid of 84. To confirm
|
||||
that no nodes were already assigned a taxonomy term with this tid, I ran another
|
||||
query against the database. I'm using aliases within this query to link the
|
||||
node, term_node and term_data tables. For more information on SQL aliases, take
|
||||
a look at <http://w3schools.com/sql/sql_alias.asp>.
|
||||
|
||||
```language-sql
|
||||
SELECT * FROM node n, term_data td, term_node tn WHERE td.tid = 84 AND n.nid = tn.nid AND tn.tid = td.tid;
|
||||
```
|
||||
|
||||
As expected, it returned no rows.
|
||||
|
||||
The table that links node and term_data is called term_node, and is made up of
|
||||
the nid and vid columns from the node table, as well as the tid column from the
|
||||
term_data table. It is here that the additional rows would need to be
|
||||
entered.
|
||||
|
||||
To confirm everything, I ran a simple query against an old post. I know that the
|
||||
only taxonomy term associated with this post is 'Personal', which has a tid
|
||||
value of 44.
|
||||
|
||||
```language-sql
|
||||
SELECT nid, tid FROM term_node WHERE nid = 216;
|
||||
```
|
||||
|
||||
Once the query had confirmed the correct tid value, I began to write the SQL
|
||||
Insert statement that would be needed to add the new term to the required nodes.
|
||||
The nid and vid values were the same on each node, and the value of my taxonomy
|
||||
term would need to be 84.
|
||||
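As a rough sketch, the statement looked something like this - note that the nid and vid pairs below are placeholders for illustration, not the actual node IDs from my site:

```language-sql
-- Illustrative only: assign the 'Drupal Planet' term (tid 84) to each noted node.
INSERT INTO term_node (nid, vid, tid) VALUES
  (216, 216, 84),
  (217, 217, 84);
```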
|
||||
Once this had completed with no errors, I returned to the administration area of
|
||||
my Drupal site to confirm whether or not the nodes had been assigned the new
|
||||
term.
|
84
website/src/posts/adding-custom-theme-templates-drupal-7.md
Normal file
84
website/src/posts/adding-custom-theme-templates-drupal-7.md
Normal file
|
@ -0,0 +1,84 @@
|
|||
---
|
||||
title: Adding Custom Theme Templates in Drupal 7
|
||||
date: 2012-04-19
|
||||
excerpt: >
|
||||
Today, I had a situation where I was displaying a list of teasers for news
|
||||
article nodes. The article content type had several different fields assigned
|
||||
to it, including main and thumbnail images. In this case, I wanted to have
|
||||
different output and fields displayed when a teaser was displayed compared to
|
||||
when a complete node was displayed.
|
||||
tags:
|
||||
- drupal
|
||||
- drupal-planet
|
||||
---
|
||||
|
||||
Today, I had a situation where I was displaying a list of teasers for news
|
||||
article nodes. The article content type had several different fields assigned to
|
||||
it, including main and thumbnail images. In this case, I wanted to have
|
||||
different output and fields displayed when a teaser was displayed compared to
|
||||
when a complete node was displayed.
|
||||
|
||||
I have previously seen it done this way by adding this into a node.tpl.php
|
||||
file:
|
||||
|
||||
```language-php
|
||||
if ($teaser) {
|
||||
// The teaser output.
|
||||
}
|
||||
else {
|
||||
// The whole node output.
|
||||
}
|
||||
```
|
||||
|
||||
However, I decided to do something different and create a separate template file
|
||||
just for teasers. This is done using the hook_preprocess_HOOK function that I
|
||||
can add into my theme's template.php file.
|
||||
|
||||
The function requires the node variables as an argument - one of which is
|
||||
theme_hook_suggestions. This is an array of suggested template files that Drupal
|
||||
looks for and attempts to use when displaying a node, and this is where I'll be
|
||||
adding a new suggestion for my teaser-specific template. Using the `debug()`
|
||||
function, I can easily see what's already there.
|
||||
|
||||
```language-php
|
||||
array (
|
||||
0 => 'node__article',
|
||||
1 => 'node__343',
|
||||
2 => 'node__view__latest_news',
|
||||
3 => 'node__view__latest_news__page',
|
||||
)
|
||||
```
|
||||
|
||||
So, within my theme's template.php file:
|
||||
|
||||
```language-php
|
||||
/**
|
||||
* Implementation of hook_preprocess_HOOK().
|
||||
*/
|
||||
function mytheme_preprocess_node(&$variables) {
|
||||
$node = $variables['node'];
|
||||
|
||||
if ($variables['teaser']) {
|
||||
// Add a new item into the theme_hook_suggestions array.
|
||||
$variables['theme_hook_suggestions'][] = 'node__' . $node->type . '_teaser';
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
After adding the new suggestion:
|
||||
|
||||
```language-php
|
||||
array (
|
||||
0 => 'node__article',
|
||||
1 => 'node__343',
|
||||
2 => 'node__view__latest_news',
|
||||
3 => 'node__view__latest_news__page',
|
||||
4 => 'node__article_teaser',
|
||||
)
|
||||
```
|
||||
|
||||
Now, within my theme I can create a new node--article-teaser.tpl.php template
|
||||
file and this will get called instead of the node--article.tpl.php when a teaser
|
||||
is loaded. As I'm not specifying the node type explicitly and am using the
|
||||
dynamic <em>\$node->type</em> value within my suggestion, this will also apply
|
||||
for all other content types on my site and not just news articles.
|
106
website/src/posts/announcing-the-drupal-vm-generator.md
Normal file
106
website/src/posts/announcing-the-drupal-vm-generator.md
Normal file
|
@ -0,0 +1,106 @@
|
|||
---
|
||||
title: Announcing the Drupal VM Generator
|
||||
date: 2016-02-15
|
||||
excerpt: For the past few weeks, I’ve been working on a personal side project based on Drupal VM - the Drupal VM Generator.
|
||||
tags:
|
||||
- drupal
|
||||
- drupal-planet
|
||||
- drupal-vm
|
||||
- drupal-vm-generator
|
||||
- symfony
|
||||
---
|
||||
|
||||
For the past few weeks, I’ve been working on a personal side project based on
|
||||
Drupal VM. It’s called the [Drupal VM Generator][1], and over the weekend I’ve
|
||||
added the final features and fixed the remaining issues, and tagged the 1.0.0
|
||||
release.
|
||||
|
||||

|
||||
|
||||
## What is Drupal VM?
|
||||
|
||||
[Drupal VM][2] is a project created and maintained by [Jeff Geerling][3]. It’s a
|
||||
[Vagrant][4] virtual machine for Drupal development that is provisioned using
|
||||
[Ansible][5].
|
||||
|
||||
What is different to a regular Vagrant VM is that it uses a file called
|
||||
`config.yml` to configure the machine. Settings such as `vagrant_hostname`,
|
||||
`drupalvm_webserver` and `drupal_core_path` are stored as YAML and passed into
|
||||
the `Vagrantfile` and the `playbook.yml` file which is used when the Ansible
|
||||
provisioner runs.
|
||||
|
||||
In addition to some essential Ansible roles for installing and configuring
|
||||
packages such as Git, MySQL, PHP and Drush, there are also some roles that are
|
||||
conditional and only installed based on the value of other settings. These
|
||||
include Apache, Nginx, Solr, Varnish and Drupal Console.
|
||||
|
||||
## What does the Drupal VM Generator do?
|
||||
|
||||
> The Drupal VM Generator is a Symfony application that allows you to quickly
|
||||
> create configuration files that are minimal and use-case specific.
|
||||
|
||||
Drupal VM comes with an [example.config.yml file][6] that shows all of the
|
||||
default variables and their values. When I first started using it, I’d make a
|
||||
copy of `example.config.yml`, rename it to `config.yml` and edit it as needed,
|
||||
but a lot of the examples aren’t needed for every use case. If you’re using
|
||||
Nginx as your webserver, then you don’t need the Apache virtual hosts. If you
|
||||
are not using Solr on this project, then you don’t need the Solr variables.
|
||||
|
||||
For a few months, I’ve kept and used boilerplate versions of `config.yml` - one
|
||||
for Apache and one for Nginx. These are minimal, so have most of the comments
|
||||
removed and only the variables that I regularly need, but these can still be
|
||||
quite time consuming to edit each time, and if there are additions or changes
|
||||
upstream, then I have two versions to maintain.
|
||||
|
||||
The Drupal VM Generator is a Symfony application that allows you to quickly
|
||||
create configuration files that are minimal and use-case specific. It uses the
|
||||
[Console component][7] to collect input from the user, [Twig][8] to generate the
|
||||
file, and the [Filesystem component][9] to write it.
|
||||
|
||||
Based on the options passed to it and/or answers that you provide, it generates
|
||||
a custom, minimal `config.yml` file for your project.
|
||||
|
||||
Here’s an example of it in action:
|
||||
|
||||

|
||||
|
||||
You can also define options when calling the command and skip any or all
|
||||
questions. Running the following would bypass all of the questions and create a
|
||||
new file with no interaction or additional steps.
|
||||
|
||||
{{ gist('24e569577ca4b72f049d', 'with-options.sh') }}
|
||||
|
||||
## Where do I get it?
|
||||
|
||||
The project is hosted on [GitHub][1], and there are installation instructions
|
||||
within the [README][10].
|
||||
|
||||
<div class="github-card" data-github="opdavies/drupal-vm-generator" data-width="400" data-height="" data-theme="default"></div>
|
||||
|
||||
The recommended method is via downloading the phar file (the same as Composer
|
||||
and Drupal Console). You can also clone the GitHub repository and run the
|
||||
command from there. I’m also planning to upload it to Packagist so that it can be
|
||||
included if you manage your projects with Composer.
|
||||
|
||||
Please log any bugs or feature requests in the [GitHub issue tracker][11], and
|
||||
I’m more than happy to receive pull requests.
|
||||
|
||||
If you’re interested in contributing, please feel free to fork the repository
|
||||
and start doing so, or contact me with any questions.
|
||||
|
||||
**Update 17/02/16:** The autoloading issue is now fixed if you require the
|
||||
package via Composer, and this has been tagged as the [1.0.1 release][12].
|
||||
|
||||
[1]: https://github.com/opdavies/drupal-vm-generator
|
||||
[2]: http://www.drupalvm.com
|
||||
[3]: http://www.jeffgeerling.com
|
||||
[4]: http://www.vagrantup.com
|
||||
[5]: https://www.ansible.com
|
||||
[6]: https://github.com/geerlingguy/drupal-vm/blob/master/example.config.yml
|
||||
[7]: http://symfony.com/doc/current/components/console/introduction.html
|
||||
[8]: http://twig.sensiolabs.org
|
||||
[9]: http://symfony.com/doc/current/components/filesystem/introduction.html
|
||||
[10]:
|
||||
https://github.com/opdavies/drupal-vm-generator/blob/master/README.md#installation
|
||||
[11]: https://github.com/opdavies/drupal-vm-generator/issues
|
||||
[12]: https://github.com/opdavies/drupal-vm-generator/releases/tag/1.0.1
|
192
website/src/posts/automating-sculpin-jenkins.md
Normal file
192
website/src/posts/automating-sculpin-jenkins.md
Normal file
|
@ -0,0 +1,192 @@
|
|||
---
|
||||
title: Automating Sculpin Builds with Jenkins CI
|
||||
date: 2015-07-21
|
||||
excerpt: How to use Jenkins to automate building Sculpin websites.
|
||||
tags:
|
||||
- jenkins
|
||||
- sculpin
|
||||
---
|
||||
|
||||
As part of re-building this site with [Sculpin](http://sculpin.io), I wanted to
|
||||
automate the deployments, as in I wouldn't need to run a script like
|
||||
[publish.sh](https://raw.githubusercontent.com/sculpin/sculpin-blog-skeleton/master/publish.sh)
|
||||
locally and have that deploy my code onto my server. Not only did that mean that
|
||||
my local workflow was simpler (update, commit and push, rather than update,
|
||||
commit, push and deploy), but if I wanted to make a quick edit or hotfix, I
|
||||
could log into GitHub or Bitbucket (wherever I decided to host the source code)
|
||||
from any computer or my phone, make the change and have it deployed for me.
|
||||
|
||||
I'd started using [Jenkins CI](http://jenkins-ci.org) during my time at the
|
||||
Drupal Association, and had since built my own Jenkins server to handle
|
||||
deployments of Drupal websites, so that was the logical choice to use.
|
||||
|
||||
## Installing Jenkins and Sculpin
|
||||
|
||||
If you don’t already have Jenkins installed and configured, I'd suggest using
|
||||
[Jeff Geerling](http://jeffgeerling.com/) (aka geerlingguy)'s
|
||||
[Ansible role for Jenkins CI](https://galaxy.ansible.com/list#/roles/440).
|
||||
|
||||
I've also released an
|
||||
[Ansible role for Sculpin](https://galaxy.ansible.com/list#/roles/4063) that
|
||||
installs the executable so that the Jenkins server can run Sculpin commands.
|
||||
|
||||
## Triggering a Build from a Git Commit
|
||||
|
||||
I created a new Jenkins item for this task, and restricted where it could be run
|
||||
to `master` (i.e. the Jenkins server rather than any of the nodes).
|
||||
|
||||
### Polling from Git
|
||||
|
||||
I entered the URL of the
|
||||
[GitHub repo](https://github.com/opdavies/oliverdavies.uk) into the **Source
|
||||
Code Management** section (the Git option _may_ have been added by the
|
||||
[Git plugin](https://wiki.jenkins-ci.org/display/JENKINS/Git+Plugin) that I have
|
||||
installed).
|
||||
|
||||
As we don’t need any write access back to the repo, using the HTTP URL rather
|
||||
than the SSH one was fine, and I didn’t need to provide any additional
|
||||
credentials.
|
||||
|
||||
Also, as I knew that I’d be working a lot with feature branches, I entered
|
||||
`*/master` as the only branch to build. This meant that pushing changes or
|
||||
making edits on any other branches would not trigger a build.
|
||||
|
||||

|
||||
|
||||
I also checked the **Poll SCM** option so that Jenkins would be routinely
|
||||
checking for updated code. This essentially uses the same syntax as cron,
|
||||
specifying minutes, hours etc. I entered `* * * * *` so that Jenkins would poll
|
||||
each minute, knowing that I could make this less frequent if needed.
|
||||
|
||||
This meant that Jenkins would now be checking for any updates to the repo each minute,
|
||||
and could execute tasks if needed.
|
||||
|
||||
### Building and Deploying
|
||||
|
||||
Within the **Builds** section of the item, I added an _Execute Shell_ step,
|
||||
where I could enter a command to execute. Here, I pasted a modified version of
|
||||
the original publish.sh script.
|
||||
|
||||
```language-bash
|
||||
#!/bin/bash
|
||||
|
||||
set -uex
|
||||
|
||||
sculpin generate --env=prod --quiet
|
||||
if [ $? -ne 0 ]; then echo "Could not generate the site"; exit 1; fi
|
||||
|
||||
rsync -avze 'ssh' --delete output_prod/ prodwww2:/var/www/html/oliverdavies.uk/htdocs
|
||||
if [ $? -ne 0 ]; then echo "Could not publish the site"; exit 1; fi
|
||||
```
|
||||
|
||||
This essentially is the same as the original file, in that Sculpin generates the
|
||||
site, and uses rsync to deploy it somewhere else. In my case, `prodwww2` is a
|
||||
Jenkins node (this alias is configured in `/var/lib/jenkins/.ssh/config`), and
|
||||
`/var/www/html/oliverdavies.uk/htdocs` is the directory from where my site is
|
||||
served.
|
||||
|
||||
## Building Periodically
|
||||
|
||||
There is some dynamic content on my site, specifically on the Talks page. Each
|
||||
talk has a date assigned to it, and within the Twig template, the talk is
|
||||
positioned within upcoming or previous talks based on whether this date is less than
|
||||
or greater than the time of the build.
|
||||
|
||||
The YAML front matter:
|
||||
|
||||
```language-yaml
|
||||
---
|
||||
...
|
||||
talks:
|
||||
- title: Test Drive Twig with Sculpin
|
||||
location: DrupalCamp North
|
||||
---
|
||||
```
|
||||
|
||||
The Twig layout:
|
||||
|
||||
```language-twig
|
||||
|
||||
{% for talk in talks|reverse if talk.date >= now %}
|
||||
{# Upcoming talks #}
|
||||
{% endfor %}
|
||||
|
||||
{% for talk in talks if talk.date < now %}
|
||||
{# Previous talks #}
|
||||
{% endfor%}
|
||||
|
||||
```
|
||||
|
||||
I also didn’t want to have to push an empty commit or manually trigger a job in
|
||||
Jenkins after doing a talk in order for it to be positioned in the correct place
|
||||
on the page, so I also wanted Jenkins to schedule a regular build regardless of
|
||||
whether or not code had been pushed, to ensure that my talks page would be up to
|
||||
date.
|
||||
|
||||
After originally thinking that I'd have to split the build steps into a separate
|
||||
item and trigger that from a scheduled item, and amend my git commit item
|
||||
accordingly, I found a **Build periodically** option that I could use within the
|
||||
same item, leaving it intact and not having to make any amendments.
|
||||
|
||||
I set this to `@daily` (the same as `H H * * *` - `H` is a Jenkins thing), so that
|
||||
the build would be triggered automatically each day without a commit, and deploy
|
||||
any updates to the site.
|
||||
|
||||

|
||||
|
||||
## Next Steps
|
||||
|
||||
This workflow works great for one site, but as I roll out more Sculpin sites,
|
||||
I'd like to reduce duplication. I expect this will mainly mean creating a
|
||||
separate `sculpin_build` item that’s decoupled from the site that it’s building,
|
||||
and instead passing variables such as environment, server name and docroot path
|
||||
as parameters in a parameterized build.
|
||||
|
||||
I'll probably also take the raw shell script out of Jenkins and save it in a
|
||||
text file that's stored locally on the server, and execute that via Jenkins.
|
||||
This means that I’d be able to store this file in a separate Git repository with
|
||||
my other Jenkins scripts and get the standard advantages of using version
|
||||
control.
|
||||
|
||||
## Update
|
||||
|
||||
Since publishing this post, I've added some more items to the original build
|
||||
script.
|
||||
|
||||
### Updating Composer
|
||||
|
||||
```language-bash
|
||||
if [ -f composer.json ]; then
|
||||
/usr/local/bin/composer install
|
||||
fi
|
||||
```
|
||||
|
||||
Updates project dependencies via
|
||||
[Composer](https://getcomposer.org/doc/00-intro.md#introduction) if
|
||||
composer.json exists.
|
||||
|
||||
### Updating Sculpin Dependencies
|
||||
|
||||
```language-bash
|
||||
if [ -f sculpin.json ]; then
|
||||
sculpin install
|
||||
fi
|
||||
```
|
||||
|
||||
Runs `sculpin install` on each build if the sculpin.json file exists, to ensure
|
||||
that the required custom bundles and dependencies are installed.
|
||||
|
||||
### Managing Redirects
|
||||
|
||||
```language-bash
|
||||
if [ -f scripts/redirects.php ]; then
|
||||
/usr/bin/php scripts/redirects.php
|
||||
fi
|
||||
```
|
||||
|
||||
I've been working on a `redirects.php` script that generates redirects from a
|
||||
.csv file, after seeing similar things in the
|
||||
[Pantheon Documentation](https://github.com/pantheon-systems/documentation) and
|
||||
[That Podcast](https://github.com/thatpodcast/thatpodcast.io) repositories. This
|
||||
checks if that file exists, and if so, runs it and generates the source file
|
||||
containing each redirect.
|
72
website/src/posts/back-future-gits-diff-apply-commands.md
Normal file
72
website/src/posts/back-future-gits-diff-apply-commands.md
Normal file
|
@ -0,0 +1,72 @@
|
|||
---
|
||||
title: Back to the future with Git’s diff and apply commands
|
||||
date: 2018-04-23
|
||||
excerpt: How to revert files using Git, but as a new commit to prevent force pushing.
|
||||
tags:
|
||||
- git
|
||||
---
|
||||
|
||||
This is one of those “there’s probably already a better way to do this”
|
||||
situations, but it worked.
|
||||
|
||||
I was having some issues this past weekend where, despite everything working
|
||||
fine locally, a server was showing a “500 Internal Server” error after I pushed some
|
||||
changes to a site. In order to bring the site back online, I needed to revert
|
||||
the site files back to the previous version, but as part of a new commit.
|
||||
|
||||
The `git reset` commands removed the interim commits which meant that I couldn’t
|
||||
push to the remote (force pushing, quite rightly, isn’t allowed for the
|
||||
production branch), and using `git revert` was resulting in merge conflicts in
|
||||
`composer.lock` that I’d rather have avoided if possible.
|
||||
|
||||
This is what `git log --oneline -n 4` was outputting:
|
||||
|
||||
```
|
||||
14e40bc Change webflo/drupal-core-require-dev version
|
||||
fc058bb Add services.yml
|
||||
60bcf33 Update composer.json and re-generate lock file
|
||||
722210c More styling
|
||||
```
|
||||
|
||||
`722210c` is the commit SHA that I needed to go back to.
|
||||
|
||||
## First Solution
|
||||
|
||||
My first solution was to use `git diff` to create a single patch file of all of
|
||||
the changes from the current point back to the original commit. In this case,
|
||||
I’m using `head~3` (the fourth commit shown in the log above) as the original reference; I
|
||||
could have alternatively used a commit ID, tag or branch name.
|
||||
|
||||
```
|
||||
git diff head head~3 > temp.patch
|
||||
git apply -v temp.patch
|
||||
```
|
||||
|
||||
With the files back in their former state, I can remove the patch, add the
|
||||
files as a new commit and push them to the remote.
|
||||
|
||||
```
|
||||
rm temp.patch
|
||||
|
||||
git add .
|
||||
git commit -m 'Back to the future'
|
||||
git push
|
||||
```
|
||||
|
||||
Although the files are back in their previous, working state, as this is a new
|
||||
commit with a new commit SHA reference, there is no issue with the remote
|
||||
rejecting the commit or needing to attempt to force push.
|
||||
|
||||
## Second Solution
|
||||
|
||||
The second solution is just a shorter, cleaner version of the first!
|
||||
|
||||
Rather than creating a patch file and applying it, the output from `git diff`
|
||||
can be piped straight into `git apply`.
|
||||
|
||||
```
|
||||
git diff head head~3 | git apply -v
|
||||
```
|
||||
|
||||
This means that there’s only one command to run and no leftover patch file, and
|
||||
I can go ahead and add and commit the changes straight away.
|
102
website/src/posts/building-gmail-filters-in-php.md
Normal file
102
website/src/posts/building-gmail-filters-in-php.md
Normal file
|
@ -0,0 +1,102 @@
|
|||
---
|
||||
title: Building Gmail Filters with PHP
|
||||
date: 2016-07-15
|
||||
excerpt: How to use PHP to generate and export filters for Gmail.
|
||||
tags:
|
||||
- gmail
|
||||
- php
|
||||
promoted: true
|
||||
---
|
||||
|
||||
Earlier this week I wrote a small PHP library called [GmailFilterBuilder][0]
|
||||
that allows you to write Gmail filters in PHP and export them to XML.
|
||||
|
||||
I was already aware of a Ruby library called [gmail-britta][1] that does the
|
||||
same thing, but a) I’m not that familiar with Ruby so the syntax wasn’t that
|
||||
natural to me - it’s been a while since I wrote any Puppet manifests, and b) it
|
||||
seemed like an interesting little project to work on one evening.
|
||||
|
||||
The library contains two classes - `GmailFilter` which is used to create each
|
||||
filter, and `GmailFilterBuilder` that parses the filters and generates the XML
|
||||
using a [Twig][2] template.
|
||||
|
||||
## Usage
|
||||
|
||||
For example:
|
||||
|
||||
```language-php
|
||||
# test.php
|
||||
|
||||
require __DIR__ . '/vendor/autoload.php';
|
||||
|
||||
use Opdavies\GmailFilterBuilder\Builder;
|
||||
use Opdavies\GmailFilterBuilder\Filter;
|
||||
|
||||
$filters = [];
|
||||
|
||||
$filters[] = Filter::create()
|
||||
->has('from:example@test.com')
|
||||
->labelAndArchive('Test')
|
||||
->neverSpam();
|
||||
|
||||
new Builder($filters);
|
||||
```
|
||||
|
||||
In this case, an email from `example@test.com` would be archived, never marked
|
||||
as spam, and have a label of "Test" added to it.
|
||||
|
||||
With this code written, and the GmailFilterBuilder library installed via
|
||||
Composer, I can run `php test.php` and have the XML written to the screen.
|
||||
|
||||
This can also be written to a file - `php test.php > filters.xml` - which can
|
||||
then be imported into Gmail.
|
||||
|
||||
## Twig Extensions
|
||||
|
||||
I also added a custom Twig extension that I moved into a separate
|
||||
[twig-extensions][5] library so that I and other people can re-use it in other
|
||||
projects.
|
||||
|
||||
It’s a simple filter that accepts a boolean and returns `true` or `false` as a
|
||||
string, but it meant that I could remove three ternary operators from the template
|
||||
and replace them with the `boolean_string` filter.
|
||||
|
||||
Before:
|
||||
|
||||
<div v-pre markdown="1">
|
||||
```language-twig
|
||||
{{ filter.isArchive ? 'true' : 'false' }}
|
||||
```
|
||||
</div>
|
||||
|
||||
After:
|
||||
|
||||
<div v-pre markdown="1">
|
||||
```language-twig
|
||||
{{ filter.isArchive|boolean_string }}
|
||||
```
|
||||
</div>
|
||||
|
||||
This can then be used to generate output like this, whereas having blank values
|
||||
would have resulted in errors when importing to Gmail.
|
||||
|
||||
```language-xml
|
||||
<apps:property name='shouldArchive' value='true'/>
|
||||
```
|
||||
|
||||
## Example
|
||||
|
||||
For a working example, see my personal [gmail-filters][3] repository on GitHub.
|
||||
|
||||
## Resources
|
||||
|
||||
- [The GmailFilterBuilder library on Packagist][4]
|
||||
- [My Gmail filters on GitHub][3]
|
||||
- [My Twig Extensions on Packagist][5]
|
||||
|
||||
[0]: https://github.com/opdavies/gmail-filter-builder
|
||||
[1]: https://github.com/antifuchs/gmail-britta
|
||||
[2]: http://twig.sensiolabs.org
|
||||
[3]: https://github.com/opdavies/gmail-filters
|
||||
[4]: https://packagist.org/packages/opdavies/gmail-filter-builder
|
||||
[5]: https://packagist.org/packages/opdavies/twig-extensions
|
|
@ -0,0 +1,18 @@
|
|||
---
|
||||
title: 'Building oliverdavies.uk with Sculpin: Part 1 - initial setup and configuration'
|
||||
excerpt: |
|
||||
First part of the "Building oliverdavies.uk" series, covering the initial
|
||||
Sculpin setup and configuration.
|
||||
tags: [sculpin]
|
||||
draft: true
|
||||
---
|
||||
|
||||
Based on <https://github.com/opdavies/sculpin-skeleton>.
|
||||
|
||||
Uses <https://github.com/opdavies/docker-image-sculpin-serve>.
|
||||
|
||||
`app/config/sculpin_kernel.yml`:
|
||||
|
||||
`app/config/sculpin_site.yml`:
|
||||
|
||||
`app/config/sculpin_site_prod.yml`:
|
37
website/src/posts/building-the-new-phpsw-website.md
Normal file
37
website/src/posts/building-the-new-phpsw-website.md
Normal file
|
@ -0,0 +1,37 @@
|
|||
---
|
||||
title: Building the new PHPSW Website
|
||||
date: 2018-02-28
|
||||
excerpt:
|
||||
Earlier this week we had another hack night, working on the new PHPSW user
|
||||
group website.
|
||||
tags:
|
||||
- phpsw
|
||||
- symfony
|
||||
- tailwind-css
|
||||
has_tweets: true
|
||||
---
|
||||
|
||||
Earlier this week we had another hack night, working on the new [PHPSW user
|
||||
group][0] website.
|
||||
|
||||
<div class="mb-4">
|
||||
<blockquote class="twitter-tweet" data-lang="en"><p lang="en" dir="ltr">Hacking away on the new <a href="https://twitter.com/phpsw?ref_src=twsrc%5Etfw">@phpsw</a> website with <a href="https://twitter.com/DaveLiddament?ref_src=twsrc%5Etfw">@DaveLiddament</a> and <a href="https://twitter.com/kasiazien?ref_src=twsrc%5Etfw">@kasiazien</a>. <a href="https://t.co/kmfjdQSOUq">pic.twitter.com/kmfjdQSOUq</a></p>— Oliver Davies (@opdavies) <a href="https://twitter.com/opdavies/status/968224364129906688?ref_src=twsrc%5Etfw">February 26, 2018</a></blockquote>
|
||||
</div>
|
||||
|
||||
It’s built with Symfony so it’s naturally using Twig for templating. I’ve become
|
||||
a big fan of the utility-based approach to CSS and [Tailwind CSS][1] in
|
||||
particular, so I’m using that for all of the styling, and using [Webpack
|
||||
Encore][2] to compile all of the assets.
|
||||
|
||||
We have an integration with Meetup.com which we’re using to pull all of our
|
||||
previous event data and store them as JSON files for Symfony to parse and
|
||||
render, which it then uses to generate static HTML to upload onto the server.
|
||||
|
||||
We’re in the process of populating all of the past data, but look out for a v1
|
||||
launch soon. In the meantime, feel free to take a peek at our [GitHub
|
||||
repository][3].
|
||||
|
||||
[0]: https://phpsw.uk
|
||||
[1]: https://tailwindcss.com
|
||||
[2]: https://github.com/symfony/webpack-encore
|
||||
[3]: https://github.com/phpsw/phpsw-ng
|
|
@ -0,0 +1,42 @@
|
|||
---
|
||||
title: Change the Content Type of Multiple Nodes Using SQL
|
||||
date: 2010-07-01
|
||||
excerpt:
|
||||
In this post, I will be changing values within my Drupal 6 site's database to quickly change the content type of multiple nodes.
|
||||
tags:
|
||||
- content-types
|
||||
- database
|
||||
- drupal
|
||||
- drupal-6
|
||||
- drupal-planet
|
||||
- sequel-pro
|
||||
- sql
|
||||
---
|
||||
|
||||
In this post, I will be changing values within my Drupal 6 site's database to
|
||||
quickly change the content type of multiple nodes. I will be using a test
|
||||
development site with the core Blog module installed, and converting Blog posts
|
||||
to a custom content type called 'News article'.
|
||||
|
||||
**Before changing any values within the database, ensure that you have an
|
||||
up-to-date backup which you can restore if you encounter a problem!**
|
||||
|
||||
To begin with, I created the 'News article' content type, and then used the
|
||||
Devel Generate module to generate some Blog nodes.
|
||||
|
||||
Using [Sequel Pro](http://www.sequelpro.com), I can query the database to view
|
||||
the Blog posts (you can also do this via the
|
||||
[Terminal](http://guides.macrumors.com/Terminal) on Mac OS X/Linux,
|
||||
[Oracle SQL Developer](http://www.oracle.com/technology/software/products/sql/index.html)
|
||||
on Windows, or directly within
|
||||
[phpMyAdmin](http://www.phpmyadmin.net/home_page/index.php)):
|
||||
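The query was something along these lines - a sketch against the standard Drupal 6 node table, as the exact columns shown in the original screenshot may have differed:

```language-sql
SELECT nid, title, type FROM node WHERE type = 'blog';
```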
|
||||
Using an SQL 'Update' command, I can change the type value from 'blog' to
|
||||
'article'. This will change every occurrence of the value 'blog'. If I wanted to
|
||||
only change certain nodes, I could add a 'Where' clause to only affect nodes
|
||||
with a certain nid or title.
|
||||
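As a sketch, the statement looks like this - the nid in the commented variant is only an example value:

```language-sql
-- Change all blog posts to the 'article' content type.
UPDATE node SET type = 'article' WHERE type = 'blog';

-- Or, to limit it to certain nodes, add an extra condition, e.g.:
-- UPDATE node SET type = 'article' WHERE type = 'blog' AND nid = 123;
```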
|
||||
Now, when I query the database, the type is shown as 'article'.
|
||||
|
||||
Now, when I go back into the administration section of my site and view the
|
||||
content, the content type now shows as 'News article'.
|
|
@ -0,0 +1,68 @@
|
|||
---
|
||||
title: Checking if a user is logged into Drupal (the right way)
|
||||
date: 2013-01-09
|
||||
excerpt: How to check if a user is logged in by using Drupal core API functions.
|
||||
tags:
|
||||
- drupal
|
||||
- drupal-6
|
||||
- drupal-7
|
||||
- drupal-planet
|
||||
- php
|
||||
---
|
||||
|
||||
I see this regularly when working on Drupal sites when someone wants to check
|
||||
whether the current user is logged in to Drupal (authenticated) or not
|
||||
(anonymous).
|
||||
|
||||
```language-php
|
||||
global $user;
|
||||
if ($user->uid) {
|
||||
// The user is logged in.
|
||||
}
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```language-php
|
||||
global $user;
|
||||
if (!$user->uid) {
|
||||
// The user is not logged in.
|
||||
}
|
||||
```
|
||||
|
||||
The better way to do this is to use the
|
||||
[user_is_logged_in()](http://api.drupal.org/api/drupal/modules!user!user.module/function/user_is_logged_in/7)
|
||||
function.
|
||||
|
||||
```language-php
|
||||
if (user_is_logged_in()) {
|
||||
// Do something.
|
||||
}
|
||||
```
|
||||
|
||||
This returns a boolean (TRUE or FALSE) depending on whether or not the user is logged in.
|
||||
Essentially, it does the same thing as the first example, but there's no need to
|
||||
load the global variable.
|
||||
|
||||
A great use case for this is within a `hook_menu()` implementation within a
|
||||
custom module.
|
||||
|
||||
```language-php
|
||||
/**
|
||||
* Implements hook_menu().
|
||||
*/
|
||||
function mymodule_menu() {
|
||||
$items['foo'] = array(
|
||||
'title' => 'Foo',
|
||||
'page callback' => 'mymodule_foo',
|
||||
'access callback' => 'user_is_logged_in',
|
||||
);
|
||||
|
||||
return $items;
|
||||
}
|
||||
```
|
||||
|
||||
There is also a
|
||||
[user_is_anonymous()](http://api.drupal.org/api/drupal/modules!user!user.module/function/user_is_anonymous/7)
|
||||
function if you want the opposite result. Both of these functions are available
|
||||
in Drupal 6 and higher.
|
|
@ -0,0 +1,22 @@
|
|||
---
|
||||
title: Checkout a specific revision from SVN from the command line
|
||||
date: 2012-05-23
|
||||
excerpt: How to checkout a specific revision from a SVN (Subversion) repository.
|
||||
tags:
|
||||
- svn
|
||||
- version-control
|
||||
---
|
||||
|
||||
How to checkout a specific revision from a SVN (Subversion) repository.
|
||||
|
||||
If you're checking out the repository for the first time:
|
||||
|
||||
```language-bash
|
||||
$ svn checkout -r 1234 url://repository/path
|
||||
```
|
||||
|
||||
If you already have the repository checked out:
|
||||
|
||||
```language-bash
|
||||
$ svn up -r 1234
|
||||
```
|
|
@ -0,0 +1,38 @@
|
|||
---
|
||||
title: Cleanly retrieving user profile data using an Entity Metadata Wrapper
|
||||
excerpt: How to use Drupal 7's EntityMetadataWrapper to cleanly retrieve user profile field data.
|
||||
tags:
|
||||
- drupal
|
||||
- drupal-7
|
||||
- drupal planet
|
||||
- php
|
||||
date: 2021-02-23
|
||||
---
|
||||
|
||||
Today I needed to load some Drupal user data via a [profile2](https://www.drupal.org/project/profile2) profile. When looking into this, most resources that I found suggest using this approach and calling the `profile2_load_by_user()` function directly and passing in the user object:
|
||||
|
||||
|
||||
```php
|
||||
$account = user_load(...);
|
||||
|
||||
$accountWrapper = new EntityDrupalWrapper('user', $account);
|
||||
// or `$accountWrapper = entity_metadata_wrapper('user', $account);`
|
||||
|
||||
$profile = profile2_load_by_user($account->value());
|
||||
// or `$profile = profile2_load_by_user($account);`
|
||||
|
||||
$profileWrapper = new EntityDrupalWrapper('profile2', $profile);
|
||||
|
||||
$firstName = $profileWrapper->get('field_first_name')->value();
|
||||
```
|
||||
|
||||
This, though, requires a few steps, and as I'm a fan of object-orientated code and Entity Metadata Wrappers, I wanted to find a cleaner solution.
|
||||
|
||||
This is my preferred method that uses method chaining. It returns the same value, is less code, and in my opinion, it's cleaner and easier to read.
|
||||
|
||||
```php
|
||||
$firstName = $accountWrapper
|
||||
->get('profile_user_basic')
|
||||
->get('field_first_name')
|
||||
->value();
|
||||
```
|
29
website/src/posts/conditional-email-addresses-webform.md
Normal file
29
website/src/posts/conditional-email-addresses-webform.md
Normal file
|
@ -0,0 +1,29 @@
|
|||
---
|
||||
title: Conditional Email Addresses in a Webform
|
||||
date: 2010-05-06
|
||||
excerpt:
|
||||
How to send webform emails to a different email address based on another
|
||||
field.
|
||||
tags:
|
||||
- conditional-email
|
||||
- drupal-6
|
||||
- drupal-planet
|
||||
- webform
|
||||
---
|
||||
|
||||
I created a new Webform to serve as a simple Contact form, but left the main
|
||||
configuration until after I created the form components. I added 'Name',
|
||||
'Email', 'Subject' and 'Message' fields, as well as a 'Category' select list.
|
||||
Below 'Options', I entered each of my desired options in the following format:
|
||||
|
||||
```language-ini
|
||||
Email address|Visible name
|
||||
```
|
||||
|
||||
I went back to the form configuration page and expanded 'Conditional Email
|
||||
Recipients', and selected my Category. Note that the standard 'Email To' field
|
||||
above it needs to be empty. Originally, I made the mistake of leaving addresses
|
||||
in that field, which resulted in people being sent emails regardless of which
|
||||
category was selected. I then configured the rest of the form.
|
||||
|
||||
Then, when I went to the finished form, the category selection was available.
|
70
website/src/posts/configuring-the-reroute-email-module.md
Normal file
70
website/src/posts/configuring-the-reroute-email-module.md
Normal file
|
@ -0,0 +1,70 @@
|
|||
---
|
||||
title: Configuring the Reroute Email Module
|
||||
date: 2014-12-22
|
||||
excerpt:
|
||||
How to configure the Reroute Email module, to prevent sending emails to real
|
||||
users from your pre-production sites!
|
||||
tags:
|
||||
- drupal
|
||||
- drupal-6
|
||||
- drupal-7
|
||||
- drupal-planet
|
||||
- email
|
||||
draft: true
|
||||
---
|
||||
|
||||
The [Reroute Email](https://www.drupal.org/project/reroute_email) module uses
|
||||
`hook_mail_alter()` to prevent emails from being sent to users from
|
||||
non-production sites. It allows you to enter one or more email addresses that
|
||||
will receive the emails instead of delivering them to the original user.
|
||||
|
||||
> This is useful in case where you do not want email sent from a Drupal site to
|
||||
> reach the users. For example, if you copy a live site to a test site for the
|
||||
> purpose of development, and you do not want any email sent to real users of
|
||||
> the original site. Or you want to check the emails sent for uniform
|
||||
> formatting, footers, ...etc.
|
||||
|
||||
As we don't need the module configured on production (we don't need to reroute
|
||||
any emails there), it's best to do this in code using settings.local.php (if you
|
||||
have one) or the standard settings.php file.
|
||||
|
||||
The first thing that we need to do is to enable rerouting. Without doing this,
|
||||
nothing will happen.
|
||||
|
||||
```language-php
|
||||
$conf['reroute_email_enable'] = TRUE;
|
||||
```
|
||||
|
||||
The next option is whether to show the rerouting description in the mail body. I
|
||||
usually have this enabled. Set this to TRUE or FALSE depending on your
|
||||
preference.
|
||||
|
||||
```language-php
|
||||
$conf['reroute_email_enable_message'] = TRUE;
|
||||
```
|
||||
|
||||
The last setting is the email address to use. If you're entering a single
|
||||
address, you can add it as a simple string.
|
||||
|
||||
```language-php
|
||||
$conf['reroute_email_address'] = 'person1@example.com';
|
||||
```
|
||||
|
||||
In this example, all emails from the site will be rerouted to
|
||||
person1@example.com.
|
||||
|
||||
If you want to add multiple addresses, these should be added in a
|
||||
semicolon-delimited list. Whilst you could add these also as a string, I prefer
|
||||
to use an array of addresses and the `implode()` function.
|
||||
|
||||
```language-php
|
||||
$conf['reroute_email_address'] = implode(';', array(
|
||||
'person1@example.com',
|
||||
'person2@example.com',
|
||||
'person3@example.com',
|
||||
));
|
||||
```
|
||||
|
||||
In this example, person2@example.com and person3@example.com would receive their
|
||||
emails from the site as normal. Any emails to addresses not in the array would
|
||||
continue to be redirected to person1@example.com.
|
|
@ -0,0 +1,46 @@
|
|||
---
|
||||
title: Continuous Integration vs Continuous Integration
|
||||
excerpt: My views on the definitions of "continuous integration".
|
||||
tags:
|
||||
- git
|
||||
date: 2021-10-07
|
||||
---
|
||||
|
||||

|
||||
|
||||
There seem to be two different definitions for the term "continuous integration" (or "CI") that I've come across whilst reading blogs, listening to podcasts, and watching video tutorials.
|
||||
|
||||
## Tooling
|
||||
|
||||
The first is around remote tools such as GitHub Actions, GitLab CI, Bitbucket Pipelines, Circle CI, and Jenkins, which automatically run tasks whenever you push or merge (or "integrate") code - such as code linting, performing static analysis checks, running automated tests, or building a deployment artifact.
|
||||
|
||||
These focus on code quality and replicate steps that you can run locally, ensuring that the build is successful and that if the CI checks pass then the code can be deployed.
|
||||
|
||||
My issue with this definition is that it may not be continuous. You could push code once a day or once a year, and it would perform the same checks and have the same outcomes and benefits.
|
||||
|
||||
## Workflow
|
||||
|
||||
The second definition isn't about tools - it's about how often you update, merge and push code (which commonly leads to feature branch vs trunk-based development, and Git Flow vs GitHub Flow discussions). How often are you pulling in the latest code, testing it with your local changes, and pushing your code for everyone else to see?
|
||||
|
||||
If you're using feature branches, how long do they last, and how quickly are they merged into the main branch?
|
||||
|
||||
Weekly? Daily? Hourly?
|
||||
|
||||
The workflow definition doesn't need GitHub, GitLab, or Bitbucket to run checks - it's about keeping your local code continuously (or as often as possible) updated and integrated with the remote code.
|
||||
|
||||
This ensures that you're developing from the latest stable version and not one that is days or weeks out of date.
|
||||
|
||||
This means that merge conflicts are much less common, as you're always pulling in the latest code and ensuring that it can be integrated.
|
||||
|
||||
## Conclusion
|
||||
|
||||
One definition isn't dependent on the other.
|
||||
|
||||
You don't need the tooling and automation to use a continuous integration workflow, but I'd recommend it. It's useful to know and have confidence that the build passes, especially if you're pulling and pushing code several times a day, but it isn't a prerequisite.
|
||||
|
||||
If you're working on a new feature or fixing a bug, pull down the latest code,
|
||||
test your changes, and push it back as often as possible.
|
||||
|
||||
If you watch a video, read a blog post, or listen to a podcast about continuous integration or "How to set up CI", remember that it's not just about the tooling.
|
||||
|
||||
There's a different workflow and mindset to consider that introduces other complementary concepts such as automated testing and test-driven development, pair and mob programming, feature flags, and continuous delivery.
|
161
website/src/posts/create-better-photo-gallery-drupal-part-1.md
Normal file
161
website/src/posts/create-better-photo-gallery-drupal-part-1.md
Normal file
|
@ -0,0 +1,161 @@
|
|||
---
|
||||
title: Create a Better Photo Gallery in Drupal - Part 1
|
||||
date: 2010-08-11
|
||||
excerpt:
|
||||
How I started converting and migrating a Coppermine photo gallery into Drupal.
|
||||
tags:
|
||||
- cck
|
||||
- drupal
|
||||
- drupal-6
|
||||
- drupal-planet
|
||||
- photo-gallery
|
||||
- sequel-pro
|
||||
- sql
|
||||
- views
|
||||
- views-attach
|
||||
---
|
||||
|
||||
Recently, I converted a client's static HTML website, along with their
|
||||
Coppermine Photo Gallery, into a Drupal-powered website.
|
||||
|
||||
Over the next few posts, I'll be replicating the process that I used during the
|
||||
conversion, and how I added some additional features to my Drupal gallery.
|
||||
|
||||
To begin with, I created my photo gallery as described by
|
||||
[Jeff Eaton](http://www.lullabot.com/about/team/jeff-eaton) in
|
||||
[this screencast](http://www.lullabot.com/articles/photo-galleries-views-attach),
|
||||
downloaded all my client's previous photos via FTP, and quickly added them into
|
||||
the new gallery using the
|
||||
[Imagefield Import](http://drupal.org/project/imagefield_import) module (which I
|
||||
mentioned
|
||||
[previously](/blog/quickly-import-multiples-images-using-imagefieldimport-module/)).
|
||||
|
||||
When I compare this to the previous gallery, I can see several differences which
|
||||
I'd like to include. The first of these is the number of photos in each gallery,
|
||||
and the date that the most recent photo was added.
|
||||
|
||||
To do this, I'd need to query my website's database. To begin with, I wanted to
|
||||
have a list of all the galleries on my site which are published, and what
|
||||
their unique node ID values are. To do this, I opened Sequel Pro and entered
|
||||
the following code:
|
||||
|
||||
```language-sql
|
||||
SELECT title AS title, nid AS gallery_id
|
||||
FROM node
|
||||
WHERE type = 'gallery'
|
||||
AND status = 1;
|
||||
```
|
||||
|
||||
As the nid value of each gallery corresponds with the 'field_gallery_nid' field
|
||||
within the content_type_photo table, I can now query the database and retrieve
|
||||
information about each specific gallery.
|
||||
|
||||
For example, using [aliasing](http://www.w3schools.com/sql/sql_alias.asp) within
|
||||
my SQL statement, I can retrieve a list of all the published photos within the
|
||||
'British Squad 2008' gallery by using the following code:
|
||||
|
||||
```language-sql
|
||||
SELECT n.title, n.nid, p.field_gallery_nid
|
||||
FROM node n, content_type_photo p
|
||||
WHERE p.field_gallery_nid = 105
|
||||
AND n.status = 1
|
||||
AND n.nid = p.nid;
|
||||
```
|
||||
|
||||
I can easily change this to count the number of published nodes by changing the
|
||||
first line of the query to read SELECT COUNT(\*).
|
||||
|
||||
```language-sql
|
||||
SELECT COUNT(*)
|
||||
FROM node n, content_type_photo p
|
||||
WHERE p.field_gallery_nid = 105
|
||||
AND n.status = 1
|
||||
AND n.nid = p.nid;
|
||||
```
|
||||
|
||||
As I've used the [Views Attach](http://drupal.org/project/views_attach) module,
|
||||
and I'm embedding the photos directly into the Gallery nodes, I can easily add this
|
||||
to each gallery by creating a custom node-gallery.tpl.php file within my theme.
|
||||
I can then use the following PHP code to retrieve the node ID for that specific
|
||||
gallery:
|
||||
|
||||
```language-php
|
||||
<?php
|
||||
$selected_gallery = db_result(db_query("
|
||||
SELECT nid
|
||||
FROM {node}
|
||||
WHERE type = 'gallery'
|
||||
AND title = '$title'
|
||||
"));
|
||||
?>
|
||||
```
|
||||
|
||||
I can then use this variable as part of my next query to count the number of
|
||||
photos within that gallery, similar to what I did earlier.
|
||||
|
||||
```language-php
|
||||
<?php
|
||||
$gallery_total = db_result(db_query("
|
||||
SELECT COUNT(*)
|
||||
FROM {content_type_photo}
|
||||
WHERE field_gallery_nid = $selected_gallery
|
||||
"));
|
||||
?>
|
||||
```
|
||||
|
||||
Next, I wanted to display the date that the last photo was added within each
|
||||
album. This was done by using a similar query that also sorted the results in a
|
||||
descending order, and limited it to one result - effectively only returning the
|
||||
created date for the newest photo.
|
||||
|
||||
```language-php
|
||||
<?php
|
||||
$latest_photo = db_result(db_query("
|
||||
SELECT n.created
|
||||
FROM {node} n, {content_type_photo} p
|
||||
WHERE p.field_gallery_nid = $selected_gallery
|
||||
AND n.nid = p.nid
|
||||
ORDER BY n.created DESC LIMIT 1
|
||||
"));
|
||||
?>
|
||||
```
|
||||
|
||||
This was all then added into a 'print' statement which displayed it on the
|
||||
page.
|
||||
|
||||
```language-php
|
||||
<?php
|
||||
if ($selected_gallery_total != 0) {
|
||||
$output = '<i>There are currently ' . $selected_gallery_total . ' photos in this gallery.';
|
||||
$output .= 'Last one added on ' . $latest_photo . '</i>';
|
||||
print $output;
|
||||
}
|
||||
?>
|
||||
```
|
||||
|
||||
OK, so let's take a look at the Gallery so far:
|
||||
|
||||
You will notice that the returned date value for the latest photo added is
|
||||
displaying the UNIX timestamp instead of a more readable format. This can be
|
||||
changed by altering the 'print' statement to include a PHP 'date' function:
|
||||
|
||||
```language-php
|
||||
<?php
|
||||
if ($selected_gallery_total != 0) {
|
||||
$output = '<i>There are currently ' . $selected_gallery_total . ' photos in this gallery.';
|
||||
$output .= 'Last one added on ' . date("l, jS F, Y", $latest_photo) . '.</i>';
|
||||
print $output;
|
||||
}
|
||||
?>
|
||||
```
|
||||
|
||||
The values that I've entered are from
|
||||
[this page](http://php.net/manual/en/function.date.php) on PHP.net, and can be
|
||||
changed according on how you want the date to be displayed.
|
||||

As I've added all of these photos today, the correct dates are currently being
displayed. However, on the client's original website, the majority of these
photos were published several months or years ago, and I'd like the new website
to still reflect the original created dates. Rather than modifying each
individual photograph, I'll be doing this in bulk in my next post.

@@ -0,0 +1,58 @@
---
title: Create a Better Photo Gallery in Drupal - Part 2
date: 2010-08-17
excerpt: Updating the galleries’ created and modified dates.
tags:
- drupal-6
- drupal-planet
- photo-gallery
- sequel-pro
- sql
---

At the end of my last post, I'd finished creating the first part of the new
photo gallery, but I wanted to change the dates of the published photos to
reflect the ones on the client's original website.

Firstly, I'll refer to the list of published galleries that I created
previously, and write a new query that also displays each photo's created and
modified dates. Picking the node ID of the required gallery, I used the
following SQL query to display a list of photos:

```language-sql
SELECT n.title, n.nid, n.created, n.changed, p.field_gallery_nid
FROM node n, content_type_photo p
WHERE n.type = 'photo'
AND p.field_gallery_nid = 103
AND n.nid = p.nid
ORDER BY n.nid ASC;
```

When I look back at the old photo gallery, I can see that the previous 'last
added' date was June 27, 2008. So, how do I update my new photos to reflect
that date? Using <http://www.onlineconversion.com/unix_time.htm>, I can enter
the required date in its readable format, and it will give me the equivalent
UNIX timestamp. To keep things relatively simple, I'll set all photos within
this gallery to the same time.
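
As an alternative to the online converter, PHP's 'strtotime' function will
return the same kind of value. This is just a quick sketch rather than part of
the original process, and the server's timezone will affect the exact number
returned:

```language-php
<?php
// Converts a readable date into its equivalent UNIX timestamp.
print strtotime('27 June 2008');
?>
```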

The result that I'm given is '1217149200'. I can now use an UPDATE statement
within another SQL query to update the created and modified dates.

```language-sql
UPDATE node
INNER JOIN content_type_photo
ON node.nid = content_type_photo.nid
SET
node.created = 1217149200,
node.changed = 1217149200
WHERE content_type_photo.field_gallery_nid = 103;
```

Now when I query the database, both the created and modified dates have been
updated, and when I return to the new photo gallery, the updated value is being
displayed.

Once the changes have been applied, it's a case of repeating the above process
for each of the required galleries.
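
If there were a lot of galleries to update, the same process could also be
scripted rather than run by hand for each one. This is only a rough sketch
using Drupal 6's db_query() function; the first entry reuses the gallery and
timestamp from above, and the second is just a placeholder:

```language-php
<?php
// Maps gallery node IDs to the timestamps that their photos should be given.
$dates = array(
  103 => 1217149200,
  // A placeholder entry for another gallery.
  104 => 1217149200,
);

foreach ($dates as $gallery_nid => $timestamp) {
  // Updates the created and changed dates of every photo in the gallery.
  db_query("UPDATE {node} n
    INNER JOIN {content_type_photo} p ON n.nid = p.nid
    SET n.created = %d, n.changed = %d
    WHERE p.field_gallery_nid = %d", $timestamp, $timestamp, $gallery_nid);
}
?>
```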

In the next post, I'll explain how to add a count of published galleries and
photos on the main photo gallery page, as well as how to install and configure
the [Shadowbox](http://drupal.org/project/shadowbox) module.

@@ -0,0 +1,64 @@
---
title: Create a Better Photo Gallery in Drupal - Part 2.1
date: 2010-10-22
excerpt: The missing code to get totals of galleries and photos.
tags:
- drupal
---

Today, I realised that I hadn't published the code that I used to create the
total figures of galleries and photos at the top of the gallery (I said at the
end of
[Part 2](/blog/create-better-photo-gallery-drupal-part-2/ 'Create a Better Photo Gallery in Drupal - Part 2')
that I'd include it in
[Part 3](/blog/create-better-photo-gallery-drupal-part-3/ 'Create a Better Photo Gallery in Drupal - Part 3'),
but I forgot). So, here it is:

```language-php
<?php

// Queries the database and returns a list of nids of published galleries.
$galleries = db_query("SELECT nid FROM {node} WHERE type = 'gallery' AND status = 1");

// Resets the number of photos.
$photos = 0;

// Loops through the published galleries and adds the number of published
// photos within each one to the total.
while ($gallery = db_fetch_array($galleries)) {
  $gallery_id = $gallery['nid'];
  $photos = $photos + db_result(db_query("SELECT COUNT(*) FROM {node} n, {content_type_photo} ctp WHERE n.status = 1 AND n.type = 'photo' AND ctp.field_gallery_nid = $gallery_id AND n.nid = ctp.nid"));
}

// Prints the output.
print 'There ';
if ($photos == 1) {
  print 'is';
}
else {
  print 'are';
}
print ' currently ';
print $photos . ' ';
if ($photos == 1) {
  print 'photo';
}
else {
  print 'photos';
}
print ' in ';

// Counts the number of published galleries on the site.
$galleries = db_result(db_query("SELECT COUNT(*) FROM {node} WHERE type = 'gallery' AND status = 1"));

// Prints the number of published galleries.
print $galleries;
if ($galleries == 1) {
  print ' gallery';
}
else {
  print ' galleries';
}
print '.';
?>
```

This was applied to the view as a header which had its input format set to
'PHP code'.

@@ -0,0 +1,49 @@
---
title: Create a Better Photo Gallery in Drupal - Part 3
date: 2010-10-13
excerpt: Grouping galleries by category.
tags:
- drupal
---

The next part of the new gallery that I want to implement is to group the
galleries by their respective categories. The first step is to edit my original
photo_gallery view and add an additional display.

I've called it 'Taxonomy', and it's similar to the original 'All Galleries'
view. The differences are that I've added the taxonomy term as an argument,
removed the header, and updated the path to be `gallery/%`. The other thing that
I need to do is override the output of the original 'All Galleries' view by
creating a file called `views-view--photo-gallery--page-1.tpl.php` and placing
it within my theme directory.

Within that file, I can remove the standard content output. This still outputs
the heading information from the original view. I can now use the
'views_embed_view' function to embed my 'Taxonomy' display into the page. The
views_embed_view function is used as follows:

```language-php
<?php print views_embed_view('my_view', 'block_1', $arg1, $arg2); ?>
```

So, to display the galleries that are assigned the taxonomy term 'tournaments',
I can use the following:

```language-php
<?php print views_embed_view('photo_gallery', 'page_2', 'tournaments'); ?>
```

To reduce the amount of code needed, I can use the following 'while' loop to
generate the same code for each taxonomy term. It dynamically retrieves the
relevant taxonomy terms from the database, and uses each name as the argument
for the view.

```language-php
<?php
$terms = db_query("SELECT * FROM {term_data} WHERE vid = 1");
while ($term = db_fetch_array($terms)) {
  print '<h3>' . $term['name'] . '</h3>';
  print views_embed_view('photo_gallery', 'page_2', $term['name']);
}
?>
```

@@ -0,0 +1,44 @@
---
title: Create a Block of Social Media Icons using CCK, Views and Nodequeue
date: 2010-06-23
excerpt: How to create a block of social media icons in Drupal.
tags:
- drupal
- drupal-6
- drupal-planet
- nodequeue
- oliverdavies.co.uk
- views
---

I recently decided that I wanted to have a block displayed in a sidebar on my
site containing icons and links to my social media profiles -
[Twitter](http://twitter.com/opdavies), [Facebook](http://facebook.com/opdavies)
etc. I tried the [Follow](http://drupal.org/project/follow) module, but it
lacked the option to add extra networks such as my
[Drupal.org](http://drupal.org/user/381388) account and my
[RSS feed](http://oliverdavies.co.uk/rss.xml). I started to create my own
version, and then found
[this blog post](http://www.hankpalan.com/blog/drupal-themes/add-your-social-connections-drupal-icons)
by Hank Palan.

I created a 'Social icon' content type with the body field removed, and with
fields for a link and an image, then downloaded the favicons from the
appropriate websites to use as the icons.

However, instead of using a custom template (node-custom.tpl.php) file, I used
the Views module.

I added fields for the node's title and the link from the node's content, both
of which are excluded from being displayed on the site. I then rewrote the
output of the icon field to create the link using the URL, with the node's
title used as the image's alternative text and the link's title.

I also used the [Nodequeue](http://drupal.org/project/nodequeue) module to
create a nodequeue and arrange the icons in the order that I wanted them to be
displayed. Once this was added as a relationship within my view, I was able to
use each node's position in the nodequeue as the sort criterion.

To complete the process, I used the
[CSS Injector](http://drupal.org/project/css_injector) module to add some
additional CSS styling to position and space out the icons.