Merge pull request #5 from erictik:upsclae-variation
Upsclae-variation
zcpua authored May 5, 2023
2 parents 2387b7d + 7b8982c commit 0dd9c67
Showing 7 changed files with 306 additions and 93 deletions.
27 changes: 23 additions & 4 deletions README.md
@@ -1,6 +1,7 @@
# midjourney-ui

Midjourney UI is an open-source txt2img UI for AI drawing.

<div align="center">
<p>
<a href="https://discord.gg/GavuGHQbV4"><img src="https://img.shields.io/discord/1082500871478329374?color=5865F2&logo=discord&logoColor=white" alt="Discord server" /></a>
@@ -13,11 +14,13 @@ Midjourney UI is an open source txt2img UI for AI draw
[discord bot example](https://github.com/erictik/midjourney-discord-wrapper/)

See [README.dev.md](README.dev.md) for development instructions.
See a screenshot of the UI:
![screenshot](images/Screenshot.png)

## Deploy

#### Vercel

Host your own live version of Midjourney UI with Vercel.

[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgit.luolix.top%2Ferictik%2Fmidjourney-ui)
@@ -27,42 +30,58 @@ Host your own live version of Midjourney UI with Vercel.
```bash
docker run --env-file .env -p 3000:3000 erictik/midjourney-ui
```

or

```bash
docker run -e SALAI_TOKEN=xxxxxxxx -e SERVER_ID=xxxxxxxx -e CHANNEL_ID=xxxxxxxx -p 3000:3000 erictik/midjourney-ui
```
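
If you use the `--env-file .env` variant, the file only needs the same three variables the direct `-e` form passes; a minimal sketch (placeholder values) might look like:

```bash
# Hypothetical .env consumed by `docker run --env-file .env` — replace the placeholders
SALAI_TOKEN=xxxxxxxx
SERVER_ID=xxxxxxxx
CHANNEL_ID=xxxxxxxx
```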

## Running locally

1. Clone the repository

```bash
git clone https://github.com/erictik/midjourney-ui.git
cd midjourney-ui
```

2. Install dependencies

```bash
npm install
```

or

```bash
yarn
```

3. Set the environment variables ([how to get your Discord SALAI_TOKEN](https://www.androidauthority.com/get-discord-token-3149920/))

```bash
export SALAI_TOKEN=xxxxxxxx
export SERVER_ID=xxxxxxxx
export CHANNEL_ID=xxxxxxxx
```

4. Run the development server

```bash
npm run dev
```

or

```bash
yarn dev
```

5. Open [http://localhost:3000](http://localhost:3000) in your browser to see the result.

## Roadmap

- [x] upscale & variation
- [ ] ChatGPT prompt generation
- [ ] history of generated images
44 changes: 44 additions & 0 deletions components/tag.tsx
@@ -0,0 +1,44 @@
import React, { useState } from "react";
import { Space, Tag } from "antd";

const { CheckableTag } = Tag;

interface Props {
  Data: string[];
  onClick?: (tag: string) => void;
}

const App = ({ Data, onClick }: Props) => {
  const [selectedTags, setSelectedTags] = useState<string[]>([]);
  // const tagsData = ["V1", "V2", "V3", "V4"];
  const handleChange = (tag: string, checked: boolean) => {
    if (!checked) return;
    const nextSelectedTags = checked
      ? [...selectedTags, tag]
      : selectedTags.filter((t) => t !== tag);
    console.log("You are interested in: ", nextSelectedTags);
    onClick && onClick(tag);
    setSelectedTags(nextSelectedTags);
  };

  return (
    <>
      <Space className="ml-5" size={16} wrap>
        {Data.map((tag) => (
          <CheckableTag
            className={
              selectedTags.includes(tag) ? "bg-neutral-700" : "bg-neutral-200"
            }
            key={tag}
            checked={selectedTags.includes(tag)}
            onChange={(checked) => handleChange(tag, checked)}
          >
            {tag}
          </CheckableTag>
        ))}
      </Space>
    </>
  );
};

export default App;
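
A rough usage sketch for the component above (the importing component, its name, and the click handler are hypothetical, not part of this commit):

```tsx
// Hypothetical parent component; "TagList" is just a local import name for components/tag.tsx.
import TagList from "../components/tag";

export default function ActionBar() {
  // Placeholder handler: a real one would map e.g. "U1".."U4" to an upscale
  // request and "V1".."V4" to a variation request.
  const handleTag = (tag: string) => {
    console.log("tag clicked:", tag);
  };

  return <TagList Data={["U1", "U2", "U3", "U4", "V1", "V2", "V3", "V4"]} onClick={handleTag} />;
}
```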
6 changes: 5 additions & 1 deletion interfaces/message.ts
@@ -1,4 +1,8 @@
 export interface Message {
   text: string;
   img: string;
-}
+  msgID?: string;
+  msgHash?: string;
+  content?: string;
+  hasTag: boolean;
+}
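
As a sketch of how the new optional fields might feed the upscale/variation endpoints (a hypothetical helper, not part of this commit; note the interface spells `msgID` while the API handlers destructure `msgId` from the request body):

```ts
import { Message } from "../interfaces/message"; // hypothetical import path

// Hypothetical helper: build the request body that pages/api/upscale.ts (and
// variation.ts) destructure as { content, index, msgId, msgHash }.
function buildActionBody(msg: Message, index: number) {
  return {
    content: msg.content,
    index,
    msgId: msg.msgID, // interface field is msgID; the handlers read msgId
    msgHash: msg.msgHash,
  };
}
```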
59 changes: 34 additions & 25 deletions pages/api/upscale.ts
@@ -1,28 +1,37 @@
 // Next.js API route support: https://nextjs.org/docs/api-routes/introduction
-import { Midjourney } from 'midjourney'
-import type { NextApiRequest, NextApiResponse } from 'next'
-import { ResponseError } from '../../interfaces'
-import { Readable } from 'stream'
-const client = new Midjourney(<string>process.env.SERVER_ID, <string>process.env.CHANNEL_ID, <string>process.env.SALAI_TOKEN)
-export default async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse
-) {
-  const { content,index,msgId,msgHash } = req.body
-  const stream = new Readable({
-    read() {
-    }
-  })
+import { Midjourney } from "midjourney";
+import { ResponseError } from "../../interfaces";
+export const config = {
+  runtime: "edge",
+};
+const client = new Midjourney(
+  <string>process.env.SERVER_ID,
+  <string>process.env.CHANNEL_ID,
+  <string>process.env.SALAI_TOKEN
+);
+client.maxWait = 600;
+export default async function handler(req: Request) {
+  const { content, index, msgId, msgHash } = await req.json();
+  console.log("upscale.handler", content);
+  const encoder = new TextEncoder();
+  const readable = new ReadableStream({
+    start(controller) {
+      console.log("upscale.start", content);
+      client
+        .Upscale(content, index, msgId, msgHash, (uri: string) => {
+          console.log("upscale.loading", uri);
+          controller.enqueue(encoder.encode(JSON.stringify({ uri })));
+        })
+        .then((msg) => {
+          console.log("upscale.done", msg);
+          controller.enqueue(encoder.encode(JSON.stringify(msg)));
+          controller.close();
+        })
+        .catch((err: ResponseError) => {
+          console.log("upscale.error", err);
+          controller.close();
+        });
+    },
+  });
-  client.Upscale(content,index,msgId,msgHash , (uri: string) => {
-    console.log("upsale.loading", uri)
-    stream.push(JSON.stringify({ uri }))
-  }).then((msg) => {
-    console.log("upsale.done", msg)
-    stream.push(JSON.stringify(msg))
-    stream.push(null)
-  }).catch((err: ResponseError) => {
-    console.log("upsale.error", err)
-    stream.push(null)
-  })
-  stream.pipe(res);
+  return new Response(readable, {});
 }
61 changes: 34 additions & 27 deletions pages/api/variation.ts
@@ -1,30 +1,37 @@
 // Next.js API route support: https://nextjs.org/docs/api-routes/introduction
-import { Midjourney } from 'midjourney'
-import type { NextApiRequest, NextApiResponse } from 'next'
-import { ResponseError } from '../../interfaces'
-import { Readable } from 'stream'
-const client = new Midjourney(<string>process.env.SERVER_ID, <string>process.env.CHANNEL_ID, <string>process.env.SALAI_TOKEN)
-export default async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse
-) {
-  const { content, index, msgId, msgHash } = req.body
-  const stream = new Readable({
-    read() {
-
-    }
-  })
+import { Midjourney } from "midjourney";
+import { ResponseError } from "../../interfaces";
+export const config = {
+  runtime: "edge",
+};
+const client = new Midjourney(
+  <string>process.env.SERVER_ID,
+  <string>process.env.CHANNEL_ID,
+  <string>process.env.SALAI_TOKEN
+);
+client.maxWait = 600;
+export default async function handler(req: Request) {
+  const { content, index, msgId, msgHash } = await req.json();
+  console.log("variation.handler", content);
+  const encoder = new TextEncoder();
+  const readable = new ReadableStream({
+    start(controller) {
+      console.log("variation.start", content);
+      client
+        .Variation(content, index, msgId, msgHash, (uri: string) => {
+          console.log("variation.loading", uri);
+          controller.enqueue(encoder.encode(JSON.stringify({ uri })));
+        })
+        .then((msg) => {
+          console.log("variation.done", msg);
+          controller.enqueue(encoder.encode(JSON.stringify(msg)));
+          controller.close();
+        })
+        .catch((err: ResponseError) => {
+          console.log("variation.error", err);
+          controller.close();
+        });
+    },
+  });
-  console.log("variation.start", prompt)
-  client.Variation(content, index, msgId, msgHash, (uri: string) => {
-    console.log("variation.loading", uri)
-    stream.push(JSON.stringify({ uri }))
-  }).then((msg) => {
-    console.log("variation.done", msg)
-    stream.push(JSON.stringify(msg))
-    stream.push(null)
-  }).catch((err: ResponseError) => {
-    console.log("variation.error", err)
-    stream.push(null)
-  })
-  stream.pipe(res);
+  return new Response(readable, {});
 }
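
Both edge handlers stream their progress as JSON chunks in one response body: interim chunks of the form `{ uri }` while the job is loading, then the final message object. A minimal sketch of a client reading such a stream follows; the reader loop is an assumption about how the UI consumes it, while the endpoint paths and body fields come from the handlers above.

```ts
// Hypothetical consumer of /api/upscale or /api/variation.
async function callStreamingApi(path: string, body: unknown): Promise<void> {
  const res = await fetch(path, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });
  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // Each enqueued chunk is a JSON string; chunks can arrive split or
    // concatenated, so a robust client would buffer before parsing.
    console.log("chunk:", decoder.decode(value));
  }
}

// e.g. callStreamingApi("/api/variation", { content, index: 2, msgId, msgHash });
```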